diff --git a/dipy.org/pull/66/.buildinfo b/dipy.org/pull/66/.buildinfo new file mode 100644 index 0000000..1e6b1eb --- /dev/null +++ b/dipy.org/pull/66/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 3a056360d59d6ae3b3861214b726ba05 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/dipy.org/pull/66/.nojekyll b/dipy.org/pull/66/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/dipy.org/pull/66/CNAME b/dipy.org/pull/66/CNAME new file mode 100644 index 0000000..871d9fc --- /dev/null +++ b/dipy.org/pull/66/CNAME @@ -0,0 +1 @@ +dipy.org \ No newline at end of file diff --git a/dipy.org/pull/66/_images/AWF_v1.png b/dipy.org/pull/66/_images/AWF_v1.png new file mode 100644 index 0000000..4a2c96a Binary files /dev/null and b/dipy.org/pull/66/_images/AWF_v1.png differ diff --git a/dipy.org/pull/66/_images/Code_ODF.png b/dipy.org/pull/66/_images/Code_ODF.png new file mode 100644 index 0000000..cc51326 Binary files /dev/null and b/dipy.org/pull/66/_images/Code_ODF.png differ diff --git a/dipy.org/pull/66/_images/DM-MNIST-112epoch.png b/dipy.org/pull/66/_images/DM-MNIST-112epoch.png new file mode 100644 index 0000000..1ef4ee3 Binary files /dev/null and b/dipy.org/pull/66/_images/DM-MNIST-112epoch.png differ diff --git a/dipy.org/pull/66/_images/DM-MNIST-DDIM300-108epoch.png b/dipy.org/pull/66/_images/DM-MNIST-DDIM300-108epoch.png new file mode 100644 index 0000000..ad73269 Binary files /dev/null and b/dipy.org/pull/66/_images/DM-MNIST-DDIM300-108epoch.png differ diff --git a/dipy.org/pull/66/_images/DSC03995.JPG b/dipy.org/pull/66/_images/DSC03995.JPG new file mode 100644 index 0000000..4659633 Binary files /dev/null and b/dipy.org/pull/66/_images/DSC03995.JPG differ diff --git a/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI.png b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI.png new file mode 100644 index 0000000..95e8987 Binary files /dev/null and b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI.png differ diff --git a/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_A.png b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_A.png new file mode 100644 index 0000000..58e4479 Binary files /dev/null and b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_A.png differ diff --git a/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_B.png b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_B.png new file mode 100644 index 0000000..8adb158 Binary files /dev/null and b/dipy.org/pull/66/_images/Diffusion_tensor_measures_from_DTI_and_DKI_B.png differ diff --git a/dipy.org/pull/66/_images/F0.png b/dipy.org/pull/66/_images/F0.png new file mode 100644 index 0000000..0334a05 Binary files /dev/null and b/dipy.org/pull/66/_images/F0.png differ diff --git a/dipy.org/pull/66/_images/F1.png b/dipy.org/pull/66/_images/F1.png new file mode 100644 index 0000000..0ef30f3 Binary files /dev/null and b/dipy.org/pull/66/_images/F1.png differ diff --git a/dipy.org/pull/66/_images/F2.png b/dipy.org/pull/66/_images/F2.png new file mode 100644 index 0000000..c00e488 Binary files /dev/null and b/dipy.org/pull/66/_images/F2.png differ diff --git a/dipy.org/pull/66/_images/F3.png b/dipy.org/pull/66/_images/F3.png new file mode 100644 index 0000000..48d02fc Binary files /dev/null and b/dipy.org/pull/66/_images/F3.png differ 
diff --git a/dipy.org/pull/66/_images/F4.png b/dipy.org/pull/66/_images/F4.png new file mode 100644 index 0000000..8160970 Binary files /dev/null and b/dipy.org/pull/66/_images/F4.png differ diff --git a/dipy.org/pull/66/_images/F5.png b/dipy.org/pull/66/_images/F5.png new file mode 100644 index 0000000..3029d9e Binary files /dev/null and b/dipy.org/pull/66/_images/F5.png differ diff --git a/dipy.org/pull/66/_images/Figure1_Midterm.png b/dipy.org/pull/66/_images/Figure1_Midterm.png new file mode 100644 index 0000000..ec9e6fb Binary files /dev/null and b/dipy.org/pull/66/_images/Figure1_Midterm.png differ diff --git a/dipy.org/pull/66/_images/Figure2_midterm.png b/dipy.org/pull/66/_images/Figure2_midterm.png new file mode 100644 index 0000000..53c15ab Binary files /dev/null and b/dipy.org/pull/66/_images/Figure2_midterm.png differ diff --git a/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures.png b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures.png new file mode 100644 index 0000000..16b00b1 Binary files /dev/null and b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures.png differ diff --git a/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_A.png b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_A.png new file mode 100644 index 0000000..dfe5866 Binary files /dev/null and b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_A.png differ diff --git a/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_B.png b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_B.png new file mode 100644 index 0000000..c18b25d Binary files /dev/null and b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_B.png differ diff --git a/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_final_post.png b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_final_post.png new file mode 100644 index 0000000..9a3fd55 Binary files /dev/null and b/dipy.org/pull/66/_images/Kurtosis_tensor_standard_measures_final_post.png differ diff --git a/dipy.org/pull/66/_images/MK_comparison.png b/dipy.org/pull/66/_images/MK_comparison.png new file mode 100644 index 0000000..7f66caf Binary files /dev/null and b/dipy.org/pull/66/_images/MK_comparison.png differ diff --git a/dipy.org/pull/66/_images/MK_sigularities_compared_to_MK_nm_zoom.png b/dipy.org/pull/66/_images/MK_sigularities_compared_to_MK_nm_zoom.png new file mode 100644 index 0000000..9373213 Binary files /dev/null and b/dipy.org/pull/66/_images/MK_sigularities_compared_to_MK_nm_zoom.png differ diff --git a/dipy.org/pull/66/_images/MK_sigularities_resolved.png b/dipy.org/pull/66/_images/MK_sigularities_resolved.png new file mode 100644 index 0000000..8d477a4 Binary files /dev/null and b/dipy.org/pull/66/_images/MK_sigularities_resolved.png differ diff --git a/dipy.org/pull/66/_images/MK_singularities_compared_to_MK_nm.png b/dipy.org/pull/66/_images/MK_singularities_compared_to_MK_nm.png new file mode 100644 index 0000000..ecc26bc Binary files /dev/null and b/dipy.org/pull/66/_images/MK_singularities_compared_to_MK_nm.png differ diff --git a/dipy.org/pull/66/_images/ODF_final_code.png b/dipy.org/pull/66/_images/ODF_final_code.png new file mode 100644 index 0000000..edf1451 Binary files /dev/null and b/dipy.org/pull/66/_images/ODF_final_code.png differ diff --git a/dipy.org/pull/66/_images/Picture1.png b/dipy.org/pull/66/_images/Picture1.png new file mode 100644 index 0000000..26ea220 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture1.png differ diff --git 
a/dipy.org/pull/66/_images/Picture2.png b/dipy.org/pull/66/_images/Picture2.png new file mode 100644 index 0000000..4f774c2 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture2.png differ diff --git a/dipy.org/pull/66/_images/Picture2A.png b/dipy.org/pull/66/_images/Picture2A.png new file mode 100644 index 0000000..bf1f9f4 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture2A.png differ diff --git a/dipy.org/pull/66/_images/Picture3.png b/dipy.org/pull/66/_images/Picture3.png new file mode 100644 index 0000000..89eb0f9 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture3.png differ diff --git a/dipy.org/pull/66/_images/Picture4.png b/dipy.org/pull/66/_images/Picture4.png new file mode 100644 index 0000000..a559000 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture4.png differ diff --git a/dipy.org/pull/66/_images/Picture5.png b/dipy.org/pull/66/_images/Picture5.png new file mode 100644 index 0000000..d77ec58 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture5.png differ diff --git a/dipy.org/pull/66/_images/Picture6.png b/dipy.org/pull/66/_images/Picture6.png new file mode 100644 index 0000000..eeae18b Binary files /dev/null and b/dipy.org/pull/66/_images/Picture6.png differ diff --git a/dipy.org/pull/66/_images/Picture7.png b/dipy.org/pull/66/_images/Picture7.png new file mode 100644 index 0000000..0713e61 Binary files /dev/null and b/dipy.org/pull/66/_images/Picture7.png differ diff --git a/dipy.org/pull/66/_images/Picture8.png b/dipy.org/pull/66/_images/Picture8.png new file mode 100644 index 0000000..6f8f86f Binary files /dev/null and b/dipy.org/pull/66/_images/Picture8.png differ diff --git a/dipy.org/pull/66/_images/RK_numerical_noise_free.png b/dipy.org/pull/66/_images/RK_numerical_noise_free.png new file mode 100644 index 0000000..c6c0772 Binary files /dev/null and b/dipy.org/pull/66/_images/RK_numerical_noise_free.png differ diff --git a/dipy.org/pull/66/_images/Step1.png b/dipy.org/pull/66/_images/Step1.png new file mode 100644 index 0000000..e6ae0c1 Binary files /dev/null and b/dipy.org/pull/66/_images/Step1.png differ diff --git a/dipy.org/pull/66/_images/Step2.png b/dipy.org/pull/66/_images/Step2.png new file mode 100644 index 0000000..f4f743a Binary files /dev/null and b/dipy.org/pull/66/_images/Step2.png differ diff --git a/dipy.org/pull/66/_images/Vij_equation.png b/dipy.org/pull/66/_images/Vij_equation.png new file mode 100644 index 0000000..c795e7c Binary files /dev/null and b/dipy.org/pull/66/_images/Vij_equation.png differ diff --git a/dipy.org/pull/66/_images/adversarial_ae_with_abr.png b/dipy.org/pull/66/_images/adversarial_ae_with_abr.png new file mode 100644 index 0000000..7068394 Binary files /dev/null and b/dipy.org/pull/66/_images/adversarial_ae_with_abr.png differ diff --git a/dipy.org/pull/66/_images/conditional_vae_architecture_diagram.png b/dipy.org/pull/66/_images/conditional_vae_architecture_diagram.png new file mode 100644 index 0000000..133c9bc Binary files /dev/null and b/dipy.org/pull/66/_images/conditional_vae_architecture_diagram.png differ diff --git a/dipy.org/pull/66/_images/conditioning_validation_using_mse.png b/dipy.org/pull/66/_images/conditioning_validation_using_mse.png new file mode 100644 index 0000000..06caa76 Binary files /dev/null and b/dipy.org/pull/66/_images/conditioning_validation_using_mse.png differ diff --git a/dipy.org/pull/66/_images/corpuscallosum_axial.png b/dipy.org/pull/66/_images/corpuscallosum_axial.png new file mode 100644 index 0000000..8876213 Binary files /dev/null 
and b/dipy.org/pull/66/_images/corpuscallosum_axial.png differ diff --git a/dipy.org/pull/66/_images/cvae_first_reconstruction_result.png b/dipy.org/pull/66/_images/cvae_first_reconstruction_result.png new file mode 100644 index 0000000..caf138e Binary files /dev/null and b/dipy.org/pull/66/_images/cvae_first_reconstruction_result.png differ diff --git a/dipy.org/pull/66/_images/denoise_b0.png b/dipy.org/pull/66/_images/denoise_b0.png new file mode 100644 index 0000000..681314b Binary files /dev/null and b/dipy.org/pull/66/_images/denoise_b0.png differ diff --git a/dipy.org/pull/66/_images/denoise_b0_piesno.png b/dipy.org/pull/66/_images/denoise_b0_piesno.png new file mode 100644 index 0000000..f7c8c9b Binary files /dev/null and b/dipy.org/pull/66/_images/denoise_b0_piesno.png differ diff --git a/dipy.org/pull/66/_images/denoise_b2000.png b/dipy.org/pull/66/_images/denoise_b2000.png new file mode 100644 index 0000000..807f219 Binary files /dev/null and b/dipy.org/pull/66/_images/denoise_b2000.png differ diff --git a/dipy.org/pull/66/_images/dipy-logo-2.png b/dipy.org/pull/66/_images/dipy-logo-2.png new file mode 100644 index 0000000..4668353 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy-logo-2.png differ diff --git a/dipy.org/pull/66/_images/dipy1.png b/dipy.org/pull/66/_images/dipy1.png new file mode 100644 index 0000000..4620546 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy1.png differ diff --git a/dipy.org/pull/66/_images/dipy10.jpeg b/dipy.org/pull/66/_images/dipy10.jpeg new file mode 100644 index 0000000..8a7a8c2 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy10.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy11.jpeg b/dipy.org/pull/66/_images/dipy11.jpeg new file mode 100644 index 0000000..de09be2 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy11.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy12.jpeg b/dipy.org/pull/66/_images/dipy12.jpeg new file mode 100644 index 0000000..f2be1f0 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy12.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy13.png b/dipy.org/pull/66/_images/dipy13.png new file mode 100644 index 0000000..f798d0f Binary files /dev/null and b/dipy.org/pull/66/_images/dipy13.png differ diff --git a/dipy.org/pull/66/_images/dipy14.jpg b/dipy.org/pull/66/_images/dipy14.jpg new file mode 100644 index 0000000..9388dc8 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy14.jpg differ diff --git a/dipy.org/pull/66/_images/dipy2.png b/dipy.org/pull/66/_images/dipy2.png new file mode 100644 index 0000000..f989f0a Binary files /dev/null and b/dipy.org/pull/66/_images/dipy2.png differ diff --git a/dipy.org/pull/66/_images/dipy3.jpeg b/dipy.org/pull/66/_images/dipy3.jpeg new file mode 100644 index 0000000..2485390 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy3.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy4.jpeg b/dipy.org/pull/66/_images/dipy4.jpeg new file mode 100644 index 0000000..0d4377f Binary files /dev/null and b/dipy.org/pull/66/_images/dipy4.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy5.jpeg b/dipy.org/pull/66/_images/dipy5.jpeg new file mode 100644 index 0000000..d9cd65d Binary files /dev/null and b/dipy.org/pull/66/_images/dipy5.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy6.png b/dipy.org/pull/66/_images/dipy6.png new file mode 100644 index 0000000..5e931a9 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy6.png differ diff --git a/dipy.org/pull/66/_images/dipy7.png b/dipy.org/pull/66/_images/dipy7.png 
new file mode 100644 index 0000000..003b0ea Binary files /dev/null and b/dipy.org/pull/66/_images/dipy7.png differ diff --git a/dipy.org/pull/66/_images/dipy8.jpeg b/dipy.org/pull/66/_images/dipy8.jpeg new file mode 100644 index 0000000..ca67184 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy8.jpeg differ diff --git a/dipy.org/pull/66/_images/dipy9.jpeg b/dipy.org/pull/66/_images/dipy9.jpeg new file mode 100644 index 0000000..6d66087 Binary files /dev/null and b/dipy.org/pull/66/_images/dipy9.jpeg differ diff --git a/dipy.org/pull/66/_images/dki_dirs_final_post.png b/dipy.org/pull/66/_images/dki_dirs_final_post.png new file mode 100644 index 0000000..ee4bf52 Binary files /dev/null and b/dipy.org/pull/66/_images/dki_dirs_final_post.png differ diff --git a/dipy.org/pull/66/_images/dki_geometries.png b/dipy.org/pull/66/_images/dki_geometries.png new file mode 100644 index 0000000..b10ddb3 Binary files /dev/null and b/dipy.org/pull/66/_images/dki_geometries.png differ diff --git a/dipy.org/pull/66/_images/dki_odfs_final_post.png b/dipy.org/pull/66/_images/dki_odfs_final_post.png new file mode 100644 index 0000000..ad421f1 Binary files /dev/null and b/dipy.org/pull/66/_images/dki_odfs_final_post.png differ diff --git a/dipy.org/pull/66/_images/dki_peaks.png b/dipy.org/pull/66/_images/dki_peaks.png new file mode 100644 index 0000000..6738859 Binary files /dev/null and b/dipy.org/pull/66/_images/dki_peaks.png differ diff --git a/dipy.org/pull/66/_images/dm3d-monai-B8-DM500.png b/dipy.org/pull/66/_images/dm3d-monai-B8-DM500.png new file mode 100644 index 0000000..b20b8e0 Binary files /dev/null and b/dipy.org/pull/66/_images/dm3d-monai-B8-DM500.png differ diff --git a/dipy.org/pull/66/_images/dm3d-monai-training-curves.png b/dipy.org/pull/66/_images/dm3d-monai-training-curves.png new file mode 100644 index 0000000..60a23bb Binary files /dev/null and b/dipy.org/pull/66/_images/dm3d-monai-training-curves.png differ diff --git a/dipy.org/pull/66/_images/dm3d-reconst-D200-D300.png b/dipy.org/pull/66/_images/dm3d-reconst-D200-D300.png new file mode 100644 index 0000000..5026964 Binary files /dev/null and b/dipy.org/pull/66/_images/dm3d-reconst-D200-D300.png differ diff --git a/dipy.org/pull/66/_images/dm3d-training-curves.png b/dipy.org/pull/66/_images/dm3d-training-curves.png new file mode 100644 index 0000000..e090f39 Binary files /dev/null and b/dipy.org/pull/66/_images/dm3d-training-curves.png differ diff --git a/dipy.org/pull/66/_images/docker_issue_fury.png b/dipy.org/pull/66/_images/docker_issue_fury.png new file mode 100644 index 0000000..3111cb0 Binary files /dev/null and b/dipy.org/pull/66/_images/docker_issue_fury.png differ diff --git a/dipy.org/pull/66/_images/eigenvalues.png b/dipy.org/pull/66/_images/eigenvalues.png new file mode 100644 index 0000000..c869c0a Binary files /dev/null and b/dipy.org/pull/66/_images/eigenvalues.png differ diff --git a/dipy.org/pull/66/_images/fibercup_better_results.png b/dipy.org/pull/66/_images/fibercup_better_results.png new file mode 100644 index 0000000..046f56f Binary files /dev/null and b/dipy.org/pull/66/_images/fibercup_better_results.png differ diff --git a/dipy.org/pull/66/_images/fibercup_preliminary_results.png b/dipy.org/pull/66/_images/fibercup_preliminary_results.png new file mode 100644 index 0000000..704b995 Binary files /dev/null and b/dipy.org/pull/66/_images/fibercup_preliminary_results.png differ diff --git a/dipy.org/pull/66/_images/fibercup_replicated.png b/dipy.org/pull/66/_images/fibercup_replicated.png new file mode 
100644 index 0000000..347eb01 Binary files /dev/null and b/dipy.org/pull/66/_images/fibercup_replicated.png differ diff --git a/dipy.org/pull/66/_images/formula_.png b/dipy.org/pull/66/_images/formula_.png new file mode 100644 index 0000000..3851b15 Binary files /dev/null and b/dipy.org/pull/66/_images/formula_.png differ diff --git a/dipy.org/pull/66/_images/gODF_equa.png b/dipy.org/pull/66/_images/gODF_equa.png new file mode 100644 index 0000000..daa3140 Binary files /dev/null and b/dipy.org/pull/66/_images/gODF_equa.png differ diff --git a/dipy.org/pull/66/_images/geometry_of_dki_tensors.png b/dipy.org/pull/66/_images/geometry_of_dki_tensors.png new file mode 100644 index 0000000..18f074b Binary files /dev/null and b/dipy.org/pull/66/_images/geometry_of_dki_tensors.png differ diff --git a/dipy.org/pull/66/_images/gsoc-logo.png b/dipy.org/pull/66/_images/gsoc-logo.png new file mode 100644 index 0000000..635b90d Binary files /dev/null and b/dipy.org/pull/66/_images/gsoc-logo.png differ diff --git a/dipy.org/pull/66/_images/inigo_preliminary_vae_result_fibercup.png b/dipy.org/pull/66/_images/inigo_preliminary_vae_result_fibercup.png new file mode 100644 index 0000000..02547e4 Binary files /dev/null and b/dipy.org/pull/66/_images/inigo_preliminary_vae_result_fibercup.png differ diff --git a/dipy.org/pull/66/_images/inigo_vanilla_autoencoder.png b/dipy.org/pull/66/_images/inigo_vanilla_autoencoder.png new file mode 100644 index 0000000..5c8b50f Binary files /dev/null and b/dipy.org/pull/66/_images/inigo_vanilla_autoencoder.png differ diff --git a/dipy.org/pull/66/_images/inigo_variational_autoencoder.png b/dipy.org/pull/66/_images/inigo_variational_autoencoder.png new file mode 100644 index 0000000..9b2c066 Binary files /dev/null and b/dipy.org/pull/66/_images/inigo_variational_autoencoder.png differ diff --git a/dipy.org/pull/66/_images/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png b/dipy.org/pull/66/_images/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png new file mode 100644 index 0000000..e2691d9 Binary files /dev/null and b/dipy.org/pull/66/_images/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png differ diff --git a/dipy.org/pull/66/_images/piesno_DKI.png b/dipy.org/pull/66/_images/piesno_DKI.png new file mode 100644 index 0000000..4705f45 Binary files /dev/null and b/dipy.org/pull/66/_images/piesno_DKI.png differ diff --git a/dipy.org/pull/66/_images/piesno_mask.png b/dipy.org/pull/66/_images/piesno_mask.png new file mode 100644 index 0000000..c86f1af Binary files /dev/null and b/dipy.org/pull/66/_images/piesno_mask.png differ diff --git a/dipy.org/pull/66/_images/python-logo.png b/dipy.org/pull/66/_images/python-logo.png new file mode 100644 index 0000000..2bc7ed2 Binary files /dev/null and b/dipy.org/pull/66/_images/python-logo.png differ diff --git a/dipy.org/pull/66/_images/streamlines_short_long.png b/dipy.org/pull/66/_images/streamlines_short_long.png new file mode 100644 index 0000000..d16b5aa Binary files /dev/null and b/dipy.org/pull/66/_images/streamlines_short_long.png differ diff --git a/dipy.org/pull/66/_images/tensor_ellipsoids.png b/dipy.org/pull/66/_images/tensor_ellipsoids.png new file mode 100644 index 0000000..1b4f2b4 Binary files /dev/null and b/dipy.org/pull/66/_images/tensor_ellipsoids.png differ diff --git a/dipy.org/pull/66/_images/vae_conditioning_validation.png b/dipy.org/pull/66/_images/vae_conditioning_validation.png new file mode 100644 index 0000000..44e8436 Binary files /dev/null and 
b/dipy.org/pull/66/_images/vae_conditioning_validation.png differ diff --git a/dipy.org/pull/66/_images/vanilla_vae_120_epoch_results.png b/dipy.org/pull/66/_images/vanilla_vae_120_epoch_results.png new file mode 100644 index 0000000..ef0cf1e Binary files /dev/null and b/dipy.org/pull/66/_images/vanilla_vae_120_epoch_results.png differ diff --git a/dipy.org/pull/66/_images/vq-vae-results.png b/dipy.org/pull/66/_images/vq-vae-results.png new file mode 100644 index 0000000..a57b574 Binary files /dev/null and b/dipy.org/pull/66/_images/vq-vae-results.png differ diff --git a/dipy.org/pull/66/_images/vqvae-f3-higher-epochs.png b/dipy.org/pull/66/_images/vqvae-f3-higher-epochs.png new file mode 100644 index 0000000..6535dc5 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae-f3-higher-epochs.png differ diff --git a/dipy.org/pull/66/_images/vqvae-monai-B12-CC.png b/dipy.org/pull/66/_images/vqvae-monai-B12-CC.png new file mode 100644 index 0000000..43ad5ea Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae-monai-B12-CC.png differ diff --git a/dipy.org/pull/66/_images/vqvae-monai-B12-both.png b/dipy.org/pull/66/_images/vqvae-monai-B12-both.png new file mode 100644 index 0000000..28e2f63 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae-monai-B12-both.png differ diff --git a/dipy.org/pull/66/_images/vqvae-reconstructions-comparison.png b/dipy.org/pull/66/_images/vqvae-reconstructions-comparison.png new file mode 100644 index 0000000..62822c5 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae-reconstructions-comparison.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-monai-B10-ICNR.png b/dipy.org/pull/66/_images/vqvae3d-monai-B10-ICNR.png new file mode 100644 index 0000000..e4f41ab Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-monai-B10-ICNR.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-monai-B10.png b/dipy.org/pull/66/_images/vqvae3d-monai-B10.png new file mode 100644 index 0000000..0f5b4ea Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-monai-B10.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-monai-B5.png b/dipy.org/pull/66/_images/vqvae3d-monai-B5.png new file mode 100644 index 0000000..c194da2 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-monai-B5.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-monai-training-plots.png b/dipy.org/pull/66/_images/vqvae3d-monai-training-plots.png new file mode 100644 index 0000000..020d298 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-monai-training-plots.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-monai-training.png b/dipy.org/pull/66/_images/vqvae3d-monai-training.png new file mode 100644 index 0000000..d9c5701 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-monai-training.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-reconst-f2.png b/dipy.org/pull/66/_images/vqvae3d-reconst-f2.png new file mode 100644 index 0000000..27ed0e2 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-reconst-f2.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-reconst-f3.png b/dipy.org/pull/66/_images/vqvae3d-reconst-f3.png new file mode 100644 index 0000000..2c90387 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-reconst-f3.png differ diff --git a/dipy.org/pull/66/_images/vqvae3d-training-curves.png b/dipy.org/pull/66/_images/vqvae3d-training-curves.png new file mode 100644 index 0000000..0f117a0 Binary files /dev/null and b/dipy.org/pull/66/_images/vqvae3d-training-curves.png differ 
diff --git a/dipy.org/pull/66/_sources/blog.rst.txt b/dipy.org/pull/66/_sources/blog.rst.txt new file mode 100644 index 0000000..0f5f52c --- /dev/null +++ b/dipy.org/pull/66/_sources/blog.rst.txt @@ -0,0 +1,3 @@ +==== +Blog +==== \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/calendar.rst.txt b/dipy.org/pull/66/_sources/calendar.rst.txt new file mode 100644 index 0000000..fe0eb83 --- /dev/null +++ b/dipy.org/pull/66/_sources/calendar.rst.txt @@ -0,0 +1,19 @@ +.. _calendar: + +======== +Calendar +======== + +You can stay updated with upcoming DIPY_ events. Check out our events calendar. + +.. raw:: html + + + + +Get Calendar +-------------- +You can also add the DIPY_ calendar to your Google calendar with this `link. `_ + +.. include:: links_names.inc + \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/index.rst.txt b/dipy.org/pull/66/_sources/index.rst.txt new file mode 100644 index 0000000..6cc36b7 --- /dev/null +++ b/dipy.org/pull/66/_sources/index.rst.txt @@ -0,0 +1,6 @@ +.. toctree:: + :maxdepth: 2 + :hidden: + + blog + calendar diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_01_15_eleftherios_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_01_15_eleftherios_gsoc_announcement.rst.txt new file mode 100644 index 0000000..a3e12de --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_01_15_eleftherios_gsoc_announcement.rst.txt @@ -0,0 +1,69 @@ +Google Summer of Code 2015 Announcement +======================================= + +.. post:: January 15 2015 + :author: Eleftherios Garyfallidis + :tags: google + :category: gsoc announcement + +We are happy to announce our application for the Google Summer of Code 2015. + +If you are interested in participating as a student, please read `this `_ first. + +GSoC is a program that enables students to learn by contributing to an open-source project, while receiving a stipend from Google and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. + +All participants should have a basic knowledge of scientific programming in Python. +Recommended reading: `Python for Data Analysis by Wes McKinney `_. + +Here are the projects we offer to mentor this summer: + +1. **3D visualizations** + + Description: The main tool for 3D visualization in dipy is the dipy.viz.fvtk module. + This creates `beautiful images `_, but the functionality is currently limited, and we would like to expand it. This project will create a more generic API that allows visualization of peaks, ODFs, volumes and streamlines in the correct space. Also implement VTK's network visualization in fvtk. Get creative! Many other things can be done here! For example, enabling the recording of 3D animations of the brain, creating glass effects, etc. + + Difficulty: high. + + Skills required: acquaintance with VTK is an advantage; knowledge of 3D graphics is required. + + Mentors: `Eleftherios Garyfallidis `_ and `Ariel Rokem `_ and `Matthew Brett `_. + +2. **Use directional information to improve dMRI registration** + + Description: Currently in DIPY we have a framework for nonlinear registration based on the idea of Symmetric Normalization `SyN `_. This framework allows the creation of new similarity metrics (e.g. cross-correlation or mutual information) and lets the SyN optimization warp the images. Now, in diffusion MRI we can have orientation distributions in each voxel. The goal of the project is to additionally use this orientation information to drive the registration. 
So now we not only warp but also re-orient the orientation distributions while warping. In other words, you will have to create a new orientation-distribution-based metric which will work inside our existing SyN framework. `This paper `_ is a must-read. + + Difficulty: high + + Skills required: expertise in registration; acquaintance with diffusion modelling. + + Mentors: `Matthew Brett `_ and `Eleftherios Garyfallidis `_. + +3. **Diffusion Kurtosis Imaging** + + Description: `Diffusion Kurtosis Imaging `_, or DKI, is a method that estimates the parameters of higher-order statistics in DWI data with multiple b-value measurements (such as measurements from the `Human Connectome Project `_). This allows us to make inferences about properties of the tissue that are not readily available with other methods, such as DTI. We have already `begun `_ the work on an implementation of this algorithm, but the work needs to be completed, and there is still much to do here. + + Difficulty: high. + + Skills required: acquaintance with diffusion MRI. + + Mentors: `Ariel Rokem `_ and `Eleftherios Garyfallidis `_ and `Matthew Brett `_. + +4. **Offline quality assurance (QA) using a publicly available dataset** + + Description: The ultimate demonstration of a tool is in its use in realistic and important cases. The analysis of high-quality publicly available data-sets (e.g. from the `Human Connectome Project `_) is one compelling case. The goal of this project is to create a pipeline for the analysis of such a data-set, and to reproducibly execute this analysis as a way to benchmark the tools available through dipy and perform QA, to detect regressions in the performance of these tools. This will also be a public show-case of the project, as a way to interest new users. + + Difficulty: intermediate. + + Skills required: acquaintance with diffusion MRI and with dipy. + + Mentors: `Ariel Rokem `_ and `Eleftherios Garyfallidis `_ and `Matthew Brett `_. + +5. **Tissue classifiers for tracking** + + Description: Research in the last couple of years has shown that using a tissue classifier in tracking can be of great benefit for creating more accurate representations of the underlying white matter anatomy. The goal of this project will be to create accurate tissue classifiers to guide tracking. So this is basically an image segmentation task. To do that, we will have to implement a couple of popular algorithms using T1-weighted images and/or invent a new one using DWI data. Sounds fun? + + Difficulty: intermediate. + + Skills required: acquaintance with diffusion MRI and image segmentation. + + Mentors: `Eleftherios Garyfallidis `_ and `Ariel Rokem `_ and `Matthew Brett `_ \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_05_17_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_05_17_Rafael.rst.txt new file mode 100644 index 0000000..3412306 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_05_17_Rafael.rst.txt @@ -0,0 +1,56 @@ +First post after acceptance! =) +=============================== + +.. post:: May 17 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +Personal Note +------------- + +Hi all, + +I am pleased to inform you that my project proposal was accepted to the Google Summer of Code! + +Congrats to everyone who was also accepted!!! This definitely will be an exciting summer! 
+ +As I mentioned in my last post, I will be implementing some exciting MRI techniques which allow us to see brain connectivity in vivo - how awesome is that? + +In the following weeks I will give you more details about this. Stay tuned and you can explore the brain with me! + +Greetings from Cambridge (UK), + +`Rafael N.H. `__ +PhD Student at the University of Cambridge + + +Before the Student Coding Period +-------------------------------- + +I am currently working on some simulations that will be useful for testing the imaging techniques that I will be implementing. + +I started this work before applying to the GSoC (https://github.com/nipy/dipy/pull/582), and in the last weeks I have been improving it. At the moment, the simulations are almost complete - the code runs without errors and is written to PEP8 standards. Now I only have to add some automated testing scripts using the Nose Python testing framework. + +During the following week, I will discuss the work done so far with my mentors (in particular I want to discuss some minor changes to the current scripts) and fix problems that I am facing in creating the automated testing scripts. + +Minor details to discuss with mentors: + +1) Suggestions for changes to the default values of the simulation modules + +2) Discuss whether it is better to remove some unnecessary inputs or keep redundant computing steps. + +3) Discuss the definition of some important variables that will be used in future steps. + +Problems to fix during this week: + +1) Resolve problems in recognizing the paths where the new versions of the modules are locally located. + +2) Fix error when trying to run Nose: + +.. code-block:: zsh + :linenos: + + Cannot run $ nosetests test_voxel.py + + ERROR: Failure: ImportError (No module named runspeed) diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_05_24_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_05_24_Rafael.rst.txt new file mode 100644 index 0000000..af03aae --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_05_24_Rafael.rst.txt @@ -0,0 +1,60 @@ +Time to start mapping brain connections and looking at brain properties in vivo +=============================================================================== + +.. post:: May 24 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +Hi all, + +Tomorrow we are starting the coding period :), so it is time to give some details about my project and tell you what was done in the community bonding period. + +1) How can we study brain connections and the brain's tissue properties in vivo? - A simple introduction for non-experts + +The trajectories of neuronal connections (tractography) and the quantification of tissue properties in the living human brain can be obtained from measures of water diffusion using MRI scans. To give you an example of how this is done, I will first describe one of the simplest techniques - diffusion tensor imaging (DTI). + +By combining the information of several diffusion-weighted images, DTI models the water diffusion for each image element using a tensor which can be represented by an ellipsoid (see Figure below). + +.. figure:: /_static/images/gsoc/2015/rafael/tensor_ellipsoids.png + :alt: tensor_ellipsoids + :width: 400px + :align: center + + Figure 1. Diffusion tensors computed from all voxels of a real brain image. This image was produced using Dipy as described in Dipy's website. + +From Figure 1 we can see that diffusion is larger in some directions. 
In fact, the direction of larger diffusion can be related to the direction of the brain's white matter fibers. The axons' myelin sheaths restrict the water diffusion, and thus diffusion is smaller in the directions perpendicular to fibers. On the other hand, the diffusion parallel to fibers is less restricted and therefore matches the direction of the fibers. + +Based on this, 3D virtual reconstructions of brain connections can be obtained using specific tracking algorithms - a procedure which is named fiber tracking. An example of these 3D maps obtained from a real brain dataset is shown below. + +.. figure:: /_static/images/gsoc/2015/rafael/corpuscallosum_axial.png + :alt: corpuscallosum_axial + :width: 400px + :align: center + + Figure 2. Example of corpus callosum fibers. These fibers connect the left and right brain hemispheres. This image was produced using Dipy as described in Dipy's website. + +Nowadays, DTI is still one of the most used diffusion-weighted techniques in both clinical applications and many research studies; however, it is not always accurate. DTI cannot properly account for the crossing of different populations of white-matter fiber connections. Moreover, it ignores the non-Gaussian properties of diffusion in biological tissues, which can be used to derive interesting and important measures of tissue properties. + + +2) Project proposal + +In this project, I will be implementing an alternative diffusion-weighted technique named diffusion kurtosis imaging (DKI) in an open-source software project, Diffusion Imaging in Python (Dipy). DKI overcomes the two major limitations of DTI: +It quantifies the non-Gaussian properties of water diffusion in biological tissues by modelling the kurtosis tensor (KT), which can be used to derive important tissue measures such as the density of axonal fibers. +Relative to the diffusion tensor, the KT is also shown to offer a better characterization of the spatial arrangement of tissue microstructure and can be used as a basis for more robust tractography. In particular, DKI-based tractography is able to resolve crossing fibers. + +3) What is done so far + +As an update of what I posted previously (see Post #2), I finished the work on DKI's simulations - procedures that will be useful for testing the code that I will be implementing during this summer. In particular, as my mentor suggested, I added some automated testing scripts using the Nose Python testing framework. These scripts now ensure that the kurtosis tensor is symmetric (as expected) and that the simulations are able to produce the diffusion tensor and kurtosis tensor in both cases of well-aligned and crossing fibers. + +Many thanks to my mentor for teaching me how to work with nose python testing. In particular, the useful tip of running the nose tests and seeing which lines the testing scripts cover by using the following command: + +.. code-block:: zsh + :linenos: + + nosetests -v dipy/sims/tests/test_voxel.py --with-coverage --cover-package=dipy + + +4) Next steps + +After merging the DKI simulations into Dipy's master branch, I will start working on the DKI reconstruction modules, based on some preliminary preparation work previously submitted by other dipy contributors. At the end of the week, I intend to finish the first part of the DKI reconstruction modules - the KT estimation from diffusion-weighted signals. For this I will implement the standard ordinary linear least-squares (OLS) solution of DKI. 
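To make the DTI description above a bit more concrete, here is a minimal sketch of how such a tensor fit is typically done with Dipy. This example is not part of the original post; the helper names (get_fnames, load_nifti) and the sample dataset are assumptions based on a recent DIPY release, and the 2015 API differed slightly.

.. code-block:: python

    # Minimal DTI fitting sketch (illustrative, not the post's original code).
    from dipy.core.gradients import gradient_table
    from dipy.data import get_fnames
    from dipy.io.gradients import read_bvals_bvecs
    from dipy.io.image import load_nifti
    from dipy.reconst.dti import TensorModel

    # Load one of DIPY's sample diffusion datasets and its gradient scheme.
    hardi_fname, bval_fname, bvec_fname = get_fnames('stanford_hardi')
    data, affine = load_nifti(hardi_fname)
    bvals, bvecs = read_bvals_bvecs(bval_fname, bvec_fname)
    gtab = gradient_table(bvals, bvecs)

    # Fit one diffusion tensor (one ellipsoid, as in Figure 1) per voxel and
    # derive the usual scalar maps.
    tenfit = TensorModel(gtab).fit(data)
    fa = tenfit.fa  # fractional anisotropy
    md = tenfit.md  # mean diffusivity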
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_06_05_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_06_05_Rafael.rst.txt new file mode 100644 index 0000000..1717606 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_06_05_Rafael.rst.txt @@ -0,0 +1,98 @@ +First report (1st week of coding, challenges and ISMRM conference) +================================================================== + +.. post:: June 05 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +The coding period started in a challenging way. + +As I mentioned in my previous post, I started the coding period by merging the work done during the community bonding period into the main Dipy master repository. This was not as trivial as expected, since some recent updates on the Dipy master repository were causing conflicts with my code. + +Rebasing +-------- + +To solve these conflicts, I used git rebase (I want to thank my mentor Dr Ariel Rokem for his useful tips on how to do this). For more details on rebasing you can find a nice tutorial here. To summarize, below you can find the essential steps to rebase a branch: + +1) Make sure that the master branch on your computer has the latest changes. For this: + +.. code-block:: zsh + :linenos: + + git checkout master + git pull upstream master + +2) Start rebasing by moving the work done on your branch onto the updated version of master: + +.. code-block:: zsh + :linenos: + + git checkout your_branch + git rebase master + +3) If there is a conflict, automatic rebasing stops so you can manually update the files. The conflicting parts of the script will be marked between the >>>> and ==== markers. + +4) After manually resolving a conflict, you can add the corrected files and continue rebasing using: + +.. code-block:: zsh + :linenos: + + git add file + git rebase --continue + +5) When rebasing is accomplished, you can push the changes to your fork by typing: + +.. code-block:: zsh + :linenos: + + git push -f origin your_branch + + +After rebasing Problem #1 +^^^^^^^^^^^^^^^^^^^^^^^^^ + +After rebasing, I noticed a problem with Dipy's master compilation. Fortunately, with the help of all the amazing Dipy team, this problem was quickly addressed (for more information see here). + +After rebasing Problem #2 - the right order to reconstruct the diffusion tensor +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This is a good example of why testing modules are so important. + +After solving problem #1, one of my testing modules was failing since the simulated diffusion tensors were giving unexpected results (to know what a diffusion tensor is, read my previous post). + +Basically, in my simulations the diffusion tensors are reconstructed from their eigenvalue and eigenvector decomposition. After some hours of debugging, I realized the cause of the logic error. The eigenvalues given by an updated master function were transposed relative to the previous version. So, for those also working with second-order tensors, make sure that when reconstructing the tensor from its eigenvalue and eigenvector decomposition you use the matrix multiplication in the following order: + +.. 
code-block:: python + :linenos: + + import numpy as np + from numpy import dot + + Tensor = dot(dot(R.T, np.diag(mevals)), R) + +where +:: + + R = [eigenvector1, eigenvector2, eigenvector3] + +and +:: + + mevals = [eigenvalue1, eigenvalue2, eigenvalue3] + +ISMRM Conference +---------------- + +As I mentioned in my proposal, this week I also attended the 23rd annual meeting of the International Society of Magnetic Resonance in Medicine (ISMRM). The conference was very productive. In particular, I had some nice discussions with the top experts on diffusion kurtosis imaging (DKI), and their feedback will be taken into account so that my work during the summer of code is done according to the most recent state of the art of the field. + +It was also great to personally meet the Dipy developers and promote the open-source software. I had very nice feedback from different research groups, and there were many new researchers interested in using Dipy and/or willing to collaborate on its development. Soon I will post some photos of the conference =). + +Next steps +---------- + +My mentor suggested a small change to the example code for the DKI simulation usage. I am currently finalizing this, so soon I will be posting the final version of the DKI simulations. + +In the following days, I will also create a pull request with the work started on the DKI reconstruction modules. As mentioned in my proposal, the implementation of these modules is the objective for my midterm evaluation. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_06_19_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_06_19_Rafael.rst.txt new file mode 100644 index 0000000..03cddba --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_06_19_Rafael.rst.txt @@ -0,0 +1,118 @@ +Progress Report (DKI simulations merged and DKI real data fitted) +================================================================= + +.. post:: June 19 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +I have made great progress in the last 2 weeks of coding!!! In particular, two major achievements were accomplished: + +- By solving the couple of problems mentioned in my previous post, the DKI simulations were finally merged into Dipy's master repository. +- The first part of the reconstruction modules to process DKI on real brain data was finalized. + +The details of these two achievements and the project's next step are posted in the sections below. + +1) DKI simulations on Dipy's master repository +---------------------------------------------- + +Just to give an idea of the work done, I am posting an example of how to use the DKI simulations that I developed. More details on the mathematical basis of these simulations can be found `here `__. + +1.1) Import python modules and defining MRI parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +First of all, we have to import the relevant modules (see code lines below). The main DKI simulations function multi_tensor_dki can be imported from the Dipy simulations' sub-module dipy.sims.voxel (line 19 shown below). + +.. image:: /_static/images/gsoc/2015/rafael/Picture1.png + :align: center + +.. raw:: html + 
+ +To perform the simulations, some parameters of the MRI acquisition have to be considered. For instance, the intensity of the MRI's diffusion-weighted signal depends on the diffusion-weighting used on the MRI scanner (measured as the b-value) and the directions in which the diffusion measurements are made (measured as the b-vectors). This information, for example, can be obtained from Dipy's real dataset samples. + +.. image:: /_static/images/gsoc/2015/rafael/Picture2.png + :align: center + +.. raw:: html + 
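Since the steps above are only shown as screenshots, here is a small hedged sketch of what the import and acquisition-parameter steps correspond to. The data helper name is an assumption based on a recent DIPY release (get_fnames), and the exact return order/format for 'small_64D' may differ between versions, so treat these names as illustrative rather than the post's original code.

.. code-block:: python

    # Illustrative sketch of the steps shown in the screenshots above.
    from dipy.data import get_fnames
    from dipy.io.gradients import read_bvals_bvecs
    from dipy.sims.voxel import multi_tensor_dki  # main DKI simulation function

    # b-values and b-vectors of the sample dataset 'small_64D' (assumed helper
    # names; older DIPY versions exposed a different data-fetching function).
    fimg, fbvals, fbvecs = get_fnames('small_64D')
    bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)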
+ +Dipy's dataset 'small_64D' was acquired with only one diffusion-weighting intensity. Since DKI requires data from more than one non-zero b-value, a second b-value is artificially added. + +.. image:: /_static/images/gsoc/2015/rafael/Picture3.png + :align: center + +.. raw:: html + 
+ +To convert the artificially produced b-values and b-vectors to the format assumed by Dipy's functions, the function gradient_table has to be called. + +.. image:: /_static/images/gsoc/2015/rafael/Picture4.png + :align: center + +1.2) Defining biological parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Having all the scanner parameters set, the biophysical parameters of the simulations have to be defined. + +Simulations are based on multi-compartmental models, which allow us to take into account the brain's white matter heterogeneity. For example, to simulate two crossing fibers with two different media (representing intra- and extra-cellular media), a total of four heterogeneous components are taken into account. The diffusion parameters of each compartment are defined below (the first two compartments correspond to the intra- and extra-cellular media for the first fiber population, while the others correspond to the media of the second fiber population). + +.. image:: /_static/images/gsoc/2015/rafael/Picture5.png + :align: center + +.. raw:: html + 
+ +The orientation of each fiber is saved in polar coordinates. To simulate crossing fibers at 70 degrees, +the compartments of the first fiber are aligned to the x-axis, while the compartments of the second fiber are aligned to the x-z plane with an angular deviation of 70 degrees from the first one. + +.. image:: /_static/images/gsoc/2015/rafael/Picture6.png + :align: center + +.. raw:: html + 
+ +Finally, the volume fractions of the compartments are defined. + +.. image:: /_static/images/gsoc/2015/rafael/Picture7.png + :align: center + +1.3) Using DKI simulation main function +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Having defined the parameters for all tissue compartments, the elements of the diffusion tensor (dt), the elements of the kurtosis tensor (kt) and the DW signals simulated from the DKI model (signal_dki) can be obtained using the function multi_tensor_dki. + +.. image:: /_static/images/gsoc/2015/rafael/Picture8.png + :align: center + +.. raw:: html + 
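Putting the pieces from the screenshots together, a self-contained sketch of the whole simulation might look as follows. This is an illustration, not the post's original code: the helper names follow a recent DIPY release, and the eigenvalues, angles and volume fractions are example values in the spirit of the text (two fibers crossing at 70 degrees, each with an intra- and an extra-cellular compartment), not necessarily the exact numbers used in the screenshots.

.. code-block:: python

    import numpy as np
    from dipy.core.gradients import gradient_table
    from dipy.data import get_fnames
    from dipy.io.gradients import read_bvals_bvecs
    from dipy.sims.voxel import multi_tensor_dki

    # Acquisition parameters: reuse the 'small_64D' gradients and add an
    # artificial second shell, since DKI needs more than one non-zero b-value.
    fimg, fbvals, fbvecs = get_fnames('small_64D')
    bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
    bvals = np.concatenate((bvals, bvals * 2), axis=0)
    bvecs = np.concatenate((bvecs, bvecs), axis=0)
    gtab = gradient_table(bvals, bvecs)

    # Four compartments: intra- and extra-cellular media for each of the two
    # crossing fiber populations (example eigenvalues, in mm^2/s).
    mevals = np.array([[0.00099, 0.00000, 0.00000],
                       [0.00226, 0.00087, 0.00087],
                       [0.00099, 0.00000, 0.00000],
                       [0.00226, 0.00087, 0.00087]])
    # First fiber along the x-axis, second in the x-z plane at 70 degrees.
    angles = [(90, 0), (90, 0), (20, 0), (20, 0)]
    fractions = [25, 25, 25, 25]  # volume fractions (percent)

    # Simulated DW signal plus the ground-truth diffusion and kurtosis tensors.
    signal_dki, dt, kt = multi_tensor_dki(gtab, mevals, S0=200, angles=angles,
                                          fractions=fractions, snr=None)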
+ +As I mentioned in my previous post, these simulations are useful for testing the performance of the DKI reconstruction code that I am currently working on. In particular, when we apply the reconstruction modules to signal_dki, the estimated diffusion and kurtosis tensors have to match the ground truth kt and dt produced here. + +2) Progress on the development of the DKI reconstruction module +----------------------------------------------------------------- + +Finalizing the DKI reconstruction module is the milestone that I proposed to achieve before the mid-term evaluation. Basically, the work done on this is on schedule! + +Since DKI is an extension of DTI, classes of the DKI modules were defined through inheritance from the classes defined in Dipy's DTI module (a nice post can be found `here `__ for more details on class inheritance). Having established this inheritance, the DKI modules are compatible with all standard diffusion statistical measures previously defined in Dipy. + +I carried on with the development of the DKI module by implementing the estimation of the diffusion and kurtosis tensors from the DKI model. Two strategies were implemented - the DKI ordinary linear least-squares (OLS) solution, which corresponds to a simpler and less computationally demanding approach, and the weighted DKI linear least-squares (WLS) solution, which is considered to be one of the most robust estimation approaches in the `recent DKI literature `__. + +Currently, I am validating the DKI implementation using the nose testing modules. Both implementations of the OLS and WLS solutions seem to produce the ground truth diffusion and kurtosis tensors when applied to the diffusion signal simulated from my DKI simulation modules. In addition, the DKI modules are also producing the expected standard diffusion parameter images when applied to real data (see Figure 1). + +.. figure:: /_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI.png + :alt: Diffusion_tensor_measures_from_DTI_and_DKI + :align: center + + Figure 1. Comparison between real brain parameter maps of the diffusion fractional anisotropy (FA), mean diffusivity (MD), axial diffusivity (AD), and radial diffusivity (RD) obtained from the DKI modules (upper panels) and the DTI module (lower panels). + +From the figure, we can see that the DT standard diffusion measures from DKI are noisier than the DTI measurements. `This is a well known pitfall of DKI `__. Since it involves the fit of a larger number of parameters, DKI is more sensitive to noise than DTI. Nevertheless, diffusion measures from DKI were shown to have a `better precision `__ (i.e. they are less sensitive to bias). Moreover, as I have been mentioning in my previous posts, DKI allows the estimation of the standard kurtosis measures. + +3) Next Steps +------------- + +Before the mid-term evaluation, a first version of the DKI reconstruction will be completed with the implementation of the standard kurtosis measures, such as the mean, axial and radial kurtosis, computed from the already estimated kurtosis tensors. Details of the usage of the DKI reconstruction modules and the meaning of the standard kurtosis measures will be summarized in my next post. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_07_02_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_07_02_Rafael.rst.txt new file mode 100644 index 0000000..320d380 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_07_02_Rafael.rst.txt @@ -0,0 +1,65 @@ +Mid-Term Summary +================ + +.. 
post:: July 02 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +We are now in the middle of the GSoC 2015 coding period, so it is time to summarize the progress made so far and update the plan for the work of the second half of the program. + +Progress summary +---------------- + +Overall a lot was achieved! As planned in my project proposal, during the first half of the coding period I finalized the implementation of the first version of the diffusion kurtosis imaging (DKI) reconstruction module. Moreover, some exciting extra steps were done! + +Accomplishing the first steps of the project proposal +----------------------------------------------------- + +1) The first achievement was merging the work done during the community bonding period into the main Dipy master repository. This work consisted of some DKI simulation modules that can be used to study the expected ground truth kurtosis values of white matter brain fibers. In this project, these simulations were useful to test the real brain DKI processing module. The documentation of this work can already be found on Dipy's website. + +2) The second achievement was finalizing the procedures to fit the DKI model on real brain data. This was done by inheriting from a module class already implemented in Dipy, which contains the implementation of the simpler diffusion tensor model (for more details on this you can see my previous post). Completion of the DKI fitting procedure was followed by the implementation of functions to compute the ordinary linear least-squares fit solution of the DKI model. By establishing the inheritance between the DKI and diffusion tensor modules, duplication of code was avoided and the standard diffusion tensor measures were automatically incorporated. The figure below shows an example of these standard measures obtained from the new DKI module after the implementation of the relevant fitting functions. + +.. figure:: /_static/images/gsoc/2015/rafael/Figure1_Midterm.png + :alt: Real brain standard diffusion tensor measures + :align: center + + Figure 1 - Real brain standard diffusion tensor measures obtained from the DKI module, which include the diffusion fractional anisotropy (FA), the mean diffusivity (MD), the axial diffusivity (AD) and the radial diffusivity (RD). The raw brain dataset used for the image reconstruction was kindly provided by Maurizio Marrale (University of Palermo). + +3) Finally, from the developed DKI fitting functions, standard measures of kurtosis were implemented. These were based on the analytical solutions proposed by `Tabesh and colleagues `__, which required, for instance, the implementation of sub-functions to rotate 4D matrices and to compute `Carlson's incomplete elliptic integrals `__. Having implemented the analytical solution of the standard kurtosis measure functions, I accomplished all the work proposed for the first half of the GSoC. Below I show the first kurtosis images reconstructed from real brain data with the newly implemented modules (a minimal usage sketch of the new module follows the figure). + +.. figure:: /_static/images/gsoc/2015/rafael/Figure2_midterm.png + :alt: Real brain standard kurtosis tensor measures + :align: center + + Figure 2 - Real brain standard kurtosis tensor measures obtained from the DKI module, which include the mean kurtosis (MK), the axial kurtosis (AK), and the radial kurtosis (RK). The raw brain dataset used for the image reconstruction was kindly provided by Maurizio Marrale (University of Palermo). 
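As referenced above, here is a minimal usage sketch of the DKI module described in this post. It assumes that a multi-shell dataset (data), its gradient table (gtab) and a brain mask (mask) have already been prepared; the attribute and method names follow the public API of released DIPY versions, so consider it illustrative rather than the exact code used to produce the figures.

.. code-block:: python

    import dipy.reconst.dki as dki

    # Fit the diffusion kurtosis model; restricting the fit to a Boolean mask
    # avoids spending time on background voxels.
    dkimodel = dki.DiffusionKurtosisModel(gtab)
    dkifit = dkimodel.fit(data, mask=mask)

    # Standard diffusion tensor measures inherited from the DTI machinery,
    # as in Figure 1 ...
    FA, MD, AD, RD = dkifit.fa, dkifit.md, dkifit.ad, dkifit.rd

    # ... and the standard kurtosis measures, as in Figure 2 (clipped here to
    # a plausible range of values).
    MK = dkifit.mk(0, 3)
    AK = dkifit.ak(0, 3)
    RK = dkifit.rk(0, 3)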
+ +Extra steps accomplished +------------------------ + +Some extra steps were also accomplished during the first half of the GSoC program. In particular, from the feedback that I obtained at the International Society for Magnetic Resonance in Medicine (ISMRM) conference (`see my fourth post `__), I decided to implement an additional DKI fitting solution - the weighted linear least-squares DKI fit solution. This fit is considered to be one of the most robust fitting approaches in the recent DKI literature (for more details see my `previous post `__). Therefore, having this method implemented, I am ensuring that the new Dipy DKI modules are implemented according to the most advanced state of the art in DKI. + +To show how productive the ISMRM conference was for the project, I am sharing a photo that I took at the conference with one of the lead developers of Dipy - Eleftherios Garyfallidis. + +.. figure:: /_static/images/gsoc/2015/rafael/DSC03995.JPG + :alt: Photo from ISMRM conference + :align: center + + Figure 3 - Photo taken at the ISMRM conference - I am wearing the Dipy T-shirt on the right side of the photo, and on the left side you can see Dipy's lead developer Eleftherios Garyfallidis. + +Next steps +---------- + +After discussing with my mentor, we agreed that we should dedicate more time to the first part of the project proposal, i.e. improving the DKI reconstruction module. Due to the large extent of code and the mathematical complexity of this module, I will dedicate a couple more weeks to improving the module's performance, code testing quality and documentation. In this way, we decided to postpone the last two milestones initially planned for the second half of the GSoC to the last three weeks of the GSoC coding period. + +The next steps of the updated project plan are as described in the following points: + +1) Merge the pull requests that contain the new DKI modules into Dipy's master repository. To facilitate the review of the implemented functions by the mentoring organization, I will split my initial pull request into smaller pull requests. + +2) While the previously developed code is being reviewed, I will implement new features in the functions for estimating kurtosis parameters to reduce processing time. For instance, I will implement optional arguments that allow each method to receive a Boolean mask indicating the image voxels to be processed. This will save the time wasted on processing unnecessary voxels, such as those in the background. + +3) I will also implement simpler numerical methods for a faster estimation of the standard DKI measures. These numerical methods are expected to be less accurate than the analytical solutions already implemented; however, they provide less computationally demanding alternatives. Moreover, they will provide a simpler mathematical framework which will be used to further validate the analytical solutions. + +4) Further improvements of the weighted linear least-squares solution will be performed. In particular, the weight estimates used in the fit will be improved by an iterative algorithm as described in the `recent DKI literature `__. + +5) Finally, the procedures to estimate concrete biophysical measures and white matter fiber directions from DKI will be implemented, as I described in the initial project proposal. 
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_07_08_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_07_08_Rafael.rst.txt new file mode 100644 index 0000000..64d8c73 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_07_08_Rafael.rst.txt @@ -0,0 +1,19 @@ +Artifacts in Dipy's sample data Sherbrooke's 3 shells +===================================================== + +.. post:: July 08 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +Hi all, + +Just to report an issue that I am currently trying to figure out! + +As I showed in my `previous post `__, my first diffusion kurtosis reconstructions are looking very good. However, when I try to process Dipy's multi-shell sample dataset Sherbrooke 3 shells, kurtosis measures seem to be widely corrupted by implausibly high negative values, see the figure below: + +.. figure:: /_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures.png + :alt: Real brain standard diffusion tensor measures + :align: center + + Fig.1 - Diffusion kurtosis standard measures obtained from the Sherbrooke 3 shells Dipy sample dataset. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_07_09_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_07_09_Rafael.rst.txt new file mode 100644 index 0000000..95958a1 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_07_09_Rafael.rst.txt @@ -0,0 +1,89 @@ +Perpendicular directions samples relative to a given vector +=========================================================== + +.. post:: July 09 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +As I mentioned in the mid-term summary, one of my next steps is to implement some numerical methods to compute the standard kurtosis measures in order to evaluate their analytical solutions. + +The numerical method for the perpendicular kurtosis requires samples of directions perpendicular to a given vector v. + +I am posting here the mathematical basis of this function, which will be implemented in the module dipy.core.geometry and named perpendicular_directions. + +Function's Algorithm +-------------------- + +Inputs: + - Vector v: Perpendicular directions are sampled relative to this vector. + - N: Number of perpendicular directions + +Step 1) The N directions are first sampled in the unit circumference parallel to the y-z plane (the plane normal to the x-axis), as shown in the figure below. + +.. figure:: /_static/images/gsoc/2015/rafael/Step1.png + :alt: First step of perpendicular_directions algorithm + :align: center + + Fig 1. First step of the perpendicular_directions algorithm. + +Coordinates of the perpendicular directions are therefore initialized as: + +.. figure:: /_static/images/gsoc/2015/rafael/F0.png + :alt: equation_1 + :align: center + + (Eq. 1) + +where ai are the angles sampled from [0, 2*pi[. To perform N samples, the angle between two adjacent directions is given by 2*pi / N. + + +Step 2) Sampled directions are then rotated and aligned to the plane normal to vector v (see figure below). + +.. figure:: /_static/images/gsoc/2015/rafael/Step2.png + :alt: Second step of perpendicular_directions algorithm + :align: center + + Fig 2. Second step of the perpendicular_directions algorithm. + +Mathematically, this is done by multiplying each perpendicular direction ni by a rotation matrix. The final perpendicular directions di are given by: + +.. figure:: /_static/images/gsoc/2015/rafael/F1.png + :alt: equation_2 + :align: center + + (Eq.
2) + +The rotation matrix in Eq. 2 is constructed as a frame of reference basis in which the first basis axis is the vector v, while the other two basis axes are any pair of directions orthogonal to vector v. These orthogonal vectors are named here vector e and vector k. For the implementation of the function perpendicular_directions, vectors e and k are estimated using the following procedure: + +1) The direction of e is defined as the normalized vector given by the cross product between vector v and the unit vector aligned to the x-axis, i.e. [1, 0, 0]. After normalizing, the final coordinates of e are: + +.. figure:: /_static/images/gsoc/2015/rafael/F2.png + :alt: equation_3 + :align: center + + (Eq. 3) + +2) k is directly defined as the cross product between vectors v and e. The coordinates of this vector are: + +.. figure:: /_static/images/gsoc/2015/rafael/F3.png + :alt: equation_4 + :align: center + + (Eq. 4) + +From equations 2, 3 and 4, the coordinates of the perpendicular directions relative to vector v are given as: + +.. figure:: /_static/images/gsoc/2015/rafael/F4.png + :alt: equation_5 + :align: center + + (Eq. 5) + +Note that Eq. 5 has a singularity when vector v is aligned to the x-axis. To resolve this singularity, perpendicular directions are first defined in the x-y plane and vector e is computed as the normalized vector given by the cross product between vector v and the unit vector aligned to the y-axis, i.e. [0, 1, 0]. Following this, the coordinates of the perpendicular directions are given as: + +.. figure:: /_static/images/gsoc/2015/rafael/F5.png + :alt: equation_6 + :align: center + + (Eq. 6) diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_07_24_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_07_24_Rafael.rst.txt new file mode 100644 index 0000000..d5bcc4a --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_07_24_Rafael.rst.txt @@ -0,0 +1,123 @@ +Progress Report on Diffusion Kurtosis Imaging (DKI) Implementation +================================================================== + +.. post:: July 24 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +Progress is going as planned in my mid-term summary :). + +A short summary of what was done in the last weeks is described in the points below: + +1. The functions to fit the diffusion kurtosis tensor are already merged into the main Dipy repository (you can see the merged work `here `__). +2. The functions to extract kurtosis statistics were submitted in a separate `pull request `__. Great advances on the validation of these functions were made, following the next steps outlined in the mid-term summary. In particular, I completed the comparisons between the analytical solutions and simpler numerical methods (for nice figures of these comparisons, see the subsection "Advances on the implementation of DKI statistics" below). +3. While waiting for the review of the work done on kurtosis tensor fitting and statistic estimation, I started working on functions to estimate the direction of brain white matter fibers from diffusion kurtosis imaging. This work is happening in a newly created `pull request `__. For the mathematical framework of this implementation and some nice figures of the work done so far, see the subsection "DKI based fiber estimates" below.
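As a side note, the perpendicular direction sampling described in the previous post (Eqs. 1-6), which also underlies part of the numerical validation work mentioned in point 2, can be sketched in plain NumPy as follows. This is only a minimal illustration of the algorithm, not the exact dipy.core.geometry.perpendicular_directions implementation:

.. code-block:: python

    import numpy as np

    def perpendicular_directions_sketch(v, num=30):
        """Sample num unit directions perpendicular to the unit vector v."""
        v = np.asarray(v, dtype=float)
        v = v / np.linalg.norm(v)

        # auxiliary axis: use the y-axis instead of the x-axis to avoid the
        # singularity that occurs when v is aligned with the x-axis (Eq. 6)
        if np.isclose(abs(v[0]), 1.0):
            axis = np.array([0., 1., 0.])
        else:
            axis = np.array([1., 0., 0.])

        # orthonormal basis of the plane normal to v (vectors e and k, Eqs. 3-4)
        e = np.cross(v, axis)
        e = e / np.linalg.norm(e)
        k = np.cross(v, e)

        # rotate the evenly spaced circle samples onto that plane (Eqs. 1, 2 and 5)
        angles = np.linspace(0, 2 * np.pi, num, endpoint=False)
        return np.outer(np.cos(angles), e) + np.outer(np.sin(angles), k)

    # sanity check: all sampled directions are orthogonal to v
    v = np.array([1., 1., 0.]) / np.sqrt(2)
    dirs = perpendicular_directions_sketch(v, num=8)
    print(np.allclose(dirs.dot(v), 0))  # True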
+ +Advances on the implementation of DKI statistics +------------------------------------------------ + +Improving the speed performance of functions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As mentioned in the last points of my mid-term summary, some features were added to the functions for estimating kurtosis statistics to reduce processing time. At the time of the mid-term evaluation, I was planning to add some optional inputs to receive a mask pointing to the relevant voxels to process. However, during the last weeks, I decided that a cleverer way to avoid processing unnecessary background voxels was to create a subfunction that automatically detects these voxels (by detecting where all diffusion tensor elements are zero) and excludes them. In addition, I also vectorized parts of the code (for details on this, see directly the discussion on the relevant `pull request `__ page). Currently, reprocessing the kurtosis measures shown in Figure 1 of my `post #6 `__ takes around: + +- Mean Kurtosis - 14 mins +- Radial Kurtosis - 7 mins +- Axial Kurtosis - 1 min + +Using ipython `profiling `__ techniques, I also identified the parts of the code that are most computationally demanding. Currently, I have been discussing with members of my mentoring organization the possibility of converting this function to `cython `__. + +Comparison between mean kurtosis analytical and approximated solutions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Mean Kurtosis (MK) corresponds to the average of the kurtosis values along all spatial directions. Therefore, an easy way to estimate MK is to sample directional kurtosis values along evenly distributed directions and compute their average. This procedure is very easy to implement; however, it has some pitfalls, such as requiring a sufficient number of direction samples and being dependent on the performance of the direction sampling algorithms. Fortunately, these pitfalls can be overcome using an analytical solution that was proposed by `Tabesh and colleagues `__. + +In previous steps of my GSoC project, I had already implemented the MK estimation functions according to the analytical solution. However, I decided to also implement the directional average since it could be useful to evaluate the analytical approach. In the figure below, I run this numerical estimate for different numbers of directions, to analyse how many directions are required for the directional kurtosis average to approach the analytical mean kurtosis solution. + +.. figure:: /_static/images/gsoc/2015/rafael/MK_comparison.png + :alt: Comparison between MK analytical and numerical solutions + :align: center + + Figure 1 - Comparison between the MK analytical (blue) and numerical (red) solutions. The numerical solution is computed for different numbers of direction samples (x-axis). + +From the figure above, we can see that the numerical approach never reaches a stable solution. In particular, large deviations are still observed even when a large number of directions is sampled. After a careful analysis, I noticed that this was caused by imperfections in the `sphere dispersion algorithm strategies `__ used to sample evenly distributed directions. + +Due to this poor performance, I decided to completely remove the MK numerical solution from the DKI implementation modules. This solution is only used in the code testing procedures.
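The comparison above can be reproduced in spirit with a short script: directional (apparent) kurtosis values are sampled on a sphere and averaged, and the result is compared with the analytical MK. The sketch assumes a fitted kurtosis model (the dkifit object from the earlier sketches) and the names of the later public dipy API (get_sphere, akc), so treat it as an illustration rather than the exact testing code:

.. code-block:: python

    from dipy.data import get_sphere

    # evenly distributed directions from one of dipy's pre-computed spheres
    sphere = get_sphere('repulsion724')

    # apparent (directional) kurtosis sampled along each sphere vertex,
    # averaged to give a numerical MK estimate
    mk_numerical = dkifit.akc(sphere).mean(axis=-1)

    # analytical MK (Tabesh et al. solution), for comparison
    mk_analytical = dkifit.mk()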
+ +Comparison between radial kurtosis analytical and approximated solutions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Radial kurtosis corresponds to the average of the kurtosis values along the perpendicular directions of the principal axis, i.e. the direction of non-crossing fibers. Tabesh and colleagues also proposed an analytical solution for this kurtosis statistic. I implemented this solution in Dipy on my previous steps of the GSoC project. Nevertheless, based on the algorithm described in my `post #8 `__, radial kurtosis can be estimated as the average of exactly evenly perpendicular direction samples. The figure below shows the comparison between the analytical solution and the approximated solution for a different number of perpendicular direction samples. + +.. figure:: /_static/images/gsoc/2015/rafael/RK_numerical_noise_free.png + :alt: Comparison between RK analytical and numerical solutions + :align: center + + Figure 2 - Comparison between the RK analytical (blue) and numerical solutions (green). The numerical solution is computed relative to a different number of direction samples (x-axis). + +Since, opposite to the MK case, the algorithm to sample perpendicular directions does not depend on sphere dispersion algorithm strategies, the numerical method for the RK equals the exact analytical solution after a small number of sample directions. + +Future directions of the DKI statistic implementation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Having finalized the validation of the DKI statistic implementation, the last step of the DKI standard statistic implementation is to replace the data used on the sample usage script by an `HCP-like dataset `__. As mentioned on my `post #7 `__, the reconstructions of the dataset currently used on this example seems to be corrupted by artifacts. After discussing with an expert of the `NeuroImage mailing list `__, these artefacts seem to be caused by an insufficient SNR for fitting the diffusion kurtosis model. + +DKI based fiber direction estimates +----------------------------------- + +Mathematical framework +^^^^^^^^^^^^^^^^^^^^^^ + +This fiber direction estimation is done based on the orientation distribution function as proposed by `Jensen and colleagues (2014) `__. The orientation distribution function (ODF) gives the probability that a fiber is aligned to a given direction and it can be estimated from the diffusion and kurtosis tensors using the following formula: + +.. figure:: /_static/images/gsoc/2015/rafael/Picture2A.png + :align: center + +where α is the radial weighting power, Uij is the element ij of the dimensionless tensor U which is defined as the mean diffusivity times the inverse of the diffusion tensor (U = MD x iDT), Vij is defined as + +.. figure:: /_static/images/gsoc/2015/rafael/Vij_equation.png + :align: center + +and ODFg the Gaussian ODF contribution which is given by: + +.. figure:: /_static/images/gsoc/2015/rafael/gODF_equa.png + :align: center + +Implementation in python 1 +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In python, this expression can be easily implemented using the following command lines: + +.. figure:: /_static/images/gsoc/2015/rafael/Code_ODF.png + :align: center + +(Note: For a description of what `from_lower_triangular` does, see `Dipy's DTI module `__). + +Results +^^^^^^^ + +In the figure below, I show a ODF example obtained from the simulation of two white matter crossing fibers. + +.. 
figure:: /_static/images/gsoc/2015/rafael/dki_geometries.png + :alt: DKI-ODF obtained from two simulated crossing fibers + :align: center + + Figure 3 - DKI-ODF obtained from two simulated crossing fibers. Maxima of the ODF correspond to the directions of the crossing fibers. + +From the figure above, we can see that the ODF has two directions with maximum amplitude, which correspond to the directions along which the fibers are aligned. + +Implementation in python 2 +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The lines of code previously presented correspond to a feasible implementation of Jensen and colleagues' formula. However, for the implementation of the DKI-ODF in dipy, I decided to expand the four for loops and use kurtosis tensor symmetry to simplify this expansion. The resulting code is as follows: + +.. figure:: /_static/images/gsoc/2015/rafael/ODF_final_code.png + :align: center + +This implementation of the ODF may look less optimized, but in fact it involves a smaller number of operations relative to the four for loops of the algorithm in "Implementation in python 1". In particular, this version of the code is more than 3 times faster! + +Future directions of the DKI-ODF implementation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +An algorithm to find the maxima of the DKI-ODF will be implemented. The directions of the ODF maxima will be used as the fiber direction estimates needed to obtain DKI based tractography maps (for a reminder of what a tractography map is, see my `post #3 `__). \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_08_07_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_08_07_Rafael.rst.txt new file mode 100644 index 0000000..a43afc0 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_08_07_Rafael.rst.txt @@ -0,0 +1,52 @@ +Progress Report on Diffusion Kurtosis Imaging (DKI) Implementation +================================================================== + +.. post:: August 07 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +We are almost getting to the end of the GSoC coding period 😭. + +The good news is that progress is still going at full speed!!! I finalized the work on the `standard kurtosis statistics estimation `__, and great progress was made on the white matter `fiber direction estimates `__ from diffusion kurtosis imaging (DKI). Details can be found below! + +Implementation of DKI statistics is now complete!!! +--------------------------------------------------- + +As I planned in my previous post, in the last couple of weeks I created a sample usage script for the DKI statistic estimation module using data acquired with similar parameters to the `Human Connectome Project `__. Figures for both the diffusion and kurtosis standard statistics are looking very good (see below), and this is great news. These results show that the implemented module can be used in the analysis of one of the largest worldwide projects aiming to map the human brain connections. + +.. figure:: /_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_A.png + :alt: Comparison of DKI and DTI measures + :align: center + + Figure 1. Real brain parameter maps of the diffusion fractional anisotropy (FA), mean diffusivity (MD), axial diffusivity (AD), and radial diffusivity (RD) obtained from an HCP-like dataset using the DKI modules (upper panels) and the DTI module (lower panels).
Although DKI involves the estimation of a larger number of parameters, the quality of the standard diffusion measures of the HCP-like dataset obtained from DKI seems to be comparable with the standard diffusion measures from DTI. This dataset was kindly supplied by `Valabregue Romain, CENIR, ICM, Paris `__. + +.. figure:: /_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_A.png + :alt: DKI specific measures + :align: center + + Figure 2 - Real brain parameter maps of the mean kurtosis (MK), axial kurtosis (AK), and radial kurtosis (RK) obtained from an HCP-like dataset using the DKI module. These are the maps specific to DKI. The dataset for these reconstructions was kindly supplied by `Valabregue Romain, CENIR, ICM, Paris `__. + +I also dramatically improved the speed performance of the kurtosis statistics estimation modules! In my previous post, I mentioned that I had optimized the code so that all three standard kurtosis statistics are processed within 30 min. Now all three standard kurtosis statistics can be computed within 1 min. Reprocessing the kurtosis measures shown in Figure 1 of my post #6 now takes: + +- Mean kurtosis - 32 sec (before 14 mins) +- Radial kurtosis - 12 sec (before 7 mins) +- Axial kurtosis - 42 sec (before 1 min) + +Advances on the DKI based fiber direction estimates +--------------------------------------------------- + +Based on the DKI-ODF described in my previous post, a procedure to extract the fiber direction estimates was implemented. This was done using the quasi-Newton algorithms available in Scipy's optimization module. For an example of the fiber direction estimates using the implemented procedure, we show below the estimates obtained from real brain voxels of the corpus callosum: + +.. figure:: /_static/images/gsoc/2015/rafael/dki_peaks.png + :alt: DKI fiber direction estimates + :align: center + + Figure 3 - Sagittal view of the direction estimates of horizontal corpus callosum fibers obtained from the DKI-ODF. + +Last steps for the Google Summer of Code 2015 +--------------------------------------------- + +1. The work on the DKI based fiber direction estimates will be finalized by making the fiber estimates compatible with the tractography methods already implemented in Dipy. In this way, I will be able to reproduce the first DKI based tractography in HCP-like data. + +2. With the procedures to estimate the standard kurtosis statistics and DKI based fiber estimates, I will finish the work proposed in my proposal by implementing some novel DKI measures which can be related to concrete biophysical parameters. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_08_13_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_08_13_Rafael.rst.txt new file mode 100644 index 0000000..db121a9 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_08_13_Rafael.rst.txt @@ -0,0 +1,27 @@ +Further improvements on the diffusion standard statistics +========================================================= + +.. post:: August 13 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +As I mentioned in my last post, I used the implemented modules to process data acquired with similar parameters to one of the largest worldwide projects, the Human Connectome Project.
Considering that I was fitting the diffusion kurtosis model with practically no pre-processing steps, which are normally required in diffusion kurtosis imaging, the kurtosis reconstructions were looking very good (see Figure 2 of my last post). + +Despite this, some image artifacts were present, likely a consequence of `Gibbs artifacts `_ and `MRI noise `_. In particular, some low intensity voxels were present in regions where we expect MK and RK to be high. To correct these artifacts, I decided to add a pre-processing step that denoises the diffusion-weighted data (for the coding details, see directly my `pull request `_). + +After fitting DKI on the denoised data, these are the amazing kurtosis maps that I obtained: + +.. figure:: /_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_B.png + :alt: Improved Kurtosis Maps + :align: center + + Figure 1 - Real brain parameter maps of the mean kurtosis (MK), axial kurtosis (AK), and radial kurtosis (RK) obtained from an HCP-like dataset using the DKI module. These are the maps specific to DKI. The dataset for these reconstructions was kindly supplied by `Valabregue Romain, CENIR, ICM, Paris `__. + +You can also see the standard diffusion measures obtained from my implemented DKI module, compared to the previously implemented DTI module: + +.. figure:: /_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_B.png + :alt: Improved DKI and DTI Comparison + :align: center + + Figure 2. Real brain parameter maps of the diffusion fractional anisotropy (FA), mean diffusivity (MD), axial diffusivity (AD), and radial diffusivity (RD) obtained from an HCP-like dataset using the DKI modules (upper panels) and the DTI module (lower panels). Although DKI involves the estimation of a larger number of parameters, the quality of the standard diffusion measures of the HCP-like dataset obtained from DKI seems to be comparable with the standard diffusion measures from DTI. This dataset was kindly supplied by `Valabregue Romain, CENIR, ICM, Paris `__. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_08_14_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_08_14_Rafael.rst.txt new file mode 100644 index 0000000..6aaa028 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_08_14_Rafael.rst.txt @@ -0,0 +1,76 @@ +Attempt to further improve the diffusion standard statistics +============================================================ + +.. post:: August 14 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +The denoising strategy that I used to improve the diffusion standard statistics (see my last post) required the estimation of the noise standard deviation (sigma). As a first approach, I used a simple sigma estimation procedure that was specifically developed for T1-weighted images. Thus, this might not be the most adequate approach for diffusion-weighted images. + +In particular, I noticed that sigma estimates had a dependency on the b-values (smaller b-values were related to higher sigma).
Examples of the computed sigma for given b-values are shown below: + +- b-value = 0 => sigma around 810 +- b-value = 200 => sigma around 510 +- b-value = 400 => sigma around 390 +- b-value = 1000 => sigma around 268 +- b-value = 2000 => sigma around 175 + +Comparing the original diffusion-weighted images with the denoised versions, I noticed that, for the smaller b-values, some image texture was present when computing the difference between the original and denoised versions of the image. This suggests that sigma values for smaller b-values are overestimated. + +.. figure:: /_static/images/gsoc/2015/rafael/denoise_b0.png + :alt: Denoising comparison for b-value 0 + :align: center + + Figure 1. - Diffusion-weighted image with the b-value set to 0. The left panel shows the image before denoising, while the middle panel shows the denoised image. The difference between both images is shown on the right. Some image structure can be identified in the image difference, which suggests that important information is being removed by the denoising process. + +.. figure:: /_static/images/gsoc/2015/rafael/denoise_b2000.png + :alt: Denoising comparison for b-value 2000 + :align: center + + Figure 2. - Diffusion-weighted image with the b-value set to 2000. The left panels show the image before denoising, while the middle panels show the denoised image. The difference between both images is shown on the right. Brain structure is not significantly identified in the image difference. + +PIESNO +------ + +Given the issue mentioned above, I tried to replace the noise estimation procedure with a technique specifically developed for diffusion-weighted images - a technique called `PIESNO `__. +This technique can be imported and used from DIPY using the following commands: + +.. code-block:: python + + from dipy.denoise.noise_estimate import piesno + # N is the number of receiver coils of the scanner used in the acquisition + sigma, background_mask = piesno(data, N=4, return_mask=True) + +The noise standard deviation given by PIESNO for all axial images was around 156. As expected, this value is smaller than the previous sigma estimates, suggesting that these were indeed overestimated. + +Although this value seems to be the most accurate estimate for the denoising procedure, I noticed that only a small amount of background voxels, used to compute sigma, was automatically detected by PIESNO. + +.. figure:: /_static/images/gsoc/2015/rafael/piesno_mask.png + :alt: Background voxels detected by PIESNO + :align: center + + Figure 3 - Background voxels detected by PIESNO. These voxels were the ones used to estimate the noise standard deviation. + +Computing again the difference between the original and denoised versions of the data, I also noticed that the denoising procedure's performance was still dependent on the b-value. In particular, for b-value=0 the procedure seems to only denoise the middle of the image. Since sigma was kept constant, this dependency on the b-value seems to be caused by the denoising algorithm itself. + +.. figure:: /_static/images/gsoc/2015/rafael/denoise_b0_piesno.png + :alt: Denoising comparison for b-value 0 using PIESNO + :align: center + + Figure 4. - Diffusion-weighted image with the b-value set to 0. The left panels show the image before denoising, while the middle panels show the denoised image. Noise estimation for the denoising procedure is now done using PIESNO. The difference between both images is shown on the right. Some image structure can be identified in the image difference, which suggests that important information is being removed by the denoising process. + +..
figure:: /_static/images/gsoc/2015/rafael/denoise_b2000.png + :alt: Denoising comparison for b-value 2000 using PIESNO + :align: center + + Figure 5. - Diffusion-weighted image with b-values set to 2000. Left panels shows the image before being denoised while the middle panels shows the denoised image. Noise estimation for the denoising procedure is now done using PIESNO. The difference between both images is shown in left. Brain structure is not significantly identified on the image difference. + +Below are the final versions of the kurtosis standard measures obtained after adjusting the sigma of the denoising procedure: + +.. figure:: /_static/images/gsoc/2015/rafael/piesno_DKI.png + :alt: Final kurtosis standard measures + :align: center + + Figure 6 - Real brain parameter maps of the mean kurtosis (MK), axial kurtosis (AK), and radial kurtosis (RK) obtain from a HCP-like dataset using the DKI module. These are the maps specific to DKI. The dataset for these reconstructions was kindly supplied by `Valabregue Romain, CENIR, ICM, Paris `__. + +Noise artifacts are present when PIESNO is used, therefore for the DKI reconstruction I decided to keep the previous denoising approach as default. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_08_16_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_08_16_Rafael.rst.txt new file mode 100644 index 0000000..fde7c05 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_08_16_Rafael.rst.txt @@ -0,0 +1,76 @@ +Start wrapping up - Test singularities of kurtosis statistics +============================================================= + +.. post:: August 16 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +As we are reaching the end of the GSoC coding period, I am starting to wrap up the code that I developed this summer. + +When reviewing the code implementing the kurtosis standard statistics, I detected some problems on the performance of the analytical solution of the mean kurtosis function. + +In this post, I am reporting how I overcame these issues! This post is extremely relevant for those interested in knowing the full details of the implementation of kurtosis standard measures. + +Problematic performance near to function singularities +------------------------------------------------------ + +As I mentioned in previous posts, the function to compute the mean kurtosis was implemented according to an analytical solution proposed by `Tabesh et al. `__, 2011. The mathematical formulas of this analytical solution have some singularities, particularly for cases where the diffusion tensor has equal eigenvalues. + +To illustrate these singularities, I am plotting below the diffusion and kurtosis tensors of crossing fiber simulations with different intersection angles. Simulations were performed based on the modules implemented during the GSoC coding period. + +.. figure:: /_static/images/gsoc/2015/rafael/geometry_of_dki_tensors.png + :alt: Diffusion and kurtosis tensors for crossing fibers + :align: center + + Figure 1 - Diffusion tensor (upper panels) and kurtosis tensors (lower panels) for crossing fibers intersecting at different angles (the ground truth fiber directions are shown in red). + +The values of the eigenvalues of the diffusion tensor as a function of the intersection angle are shown below. + +.. 
figure:: /_static/images/gsoc/2015/rafael/eigenvalues.png + :alt: Diffusion eigenvalues vs intersection angle + :align: center + + Figure 2 - Diffusion eigenvalues as function of crossing fibers intersection angle. First eigenvalue is plotted in red while the second and third are plotted in green and blue. + +From the figure above, we can detect two problematic cases for the MK analytical solution: + +1. When intersection angle is zero (i.e. when the two fibers are aligned), the second diffusion eigenvalue is equal to the third eigenvalue. +2. When the intersection angle is 90 degrees, the first diffusion eigenvalue is equal to the second eigenvalue. + +Based on the work done by Tabesh et al., 2011, these MK estimation singularities can be mathematically resolved by detecting the problematic cases and using specific formulas for each detected situation. In the previous version of my codes, I was detecting the cases where two or three eigenvalues were equal by analyzing if their differences were three orders of magnitude larger than system's epsilon. For example, to automatically check if the first eigenvalue equals the second eigenvalue, the following lines of code were used: + +.. code-block:: python + + import numpy as np + er = np.finfo(L1.ravel()[0]).eps * 1e3 + cond1 = (abs(L1 - L2) < er) + +Although my testing modules were showing that this procedure was successfully solving the singularities for eigenvalue differences three orders of magnitude smaller than the system's epsilon, when plotting MK as a function of the intersection angle, some unexpected underestimates were present in the regions near the singularities (see the figures below). + +.. figure:: /_static/images/gsoc/2015/rafael/MK_singularities_compared_to_MK_nm.png + :alt: MK values vs crossing angle (full range) + :align: center + + Figure 3 - MK values as function of the crossing angle. The blue line shows the MK values estimated from the analytical solution while the red line shows the MK values estimated from a numerical method described in previous posts. + +.. figure:: /_static/images/gsoc/2015/rafael/MK_sigularities_compared_to_MK_nm_zoom.png + :alt: MK values vs crossing angle (zoomed) + :align: center + + Figure 4 - MK values as function of the crossing angle (range between 85 and 90 degrees). The blue line shows the MK values estimated from the analytical solution while the red line shows the MK values estimated from a numerical method described in previous posts. This figure was produced for a better visualization of the underestimations still present near to the crossing angle of 90 degrees. + +After some analysis, I noticed that MK underestimations were still present if eigenvalues were not 2% different from each other. Given this, I was able to solve this underestimation by adjusting the criteria of eigenvalue comparison. As an example, to compare the first eigenvalue with the second, the following lines of code are now used: + +:: + + er = 2.5e-2 # difference (in %) between two eigenvalues to be considered as different + cond1 = (abs((L1 - L2) / L1) > er) + +Below, I am showing the new MK estimates as a function of the crossing angle, where all underestimations seem to be corrected. Moreover, discontinuities on the limits between the problematic and the non-problematic eigenvalue regime are relatively small. The most significant differences are now between different MK estimation methods (for details on the difference between these methods revisit `post #9 `__). + +.. 
figure:: /_static/images/gsoc/2015/rafael/MK_sigularities_resolved.png + :alt: Corrected MK values vs crossing angle + :align: center + + Figure 5 - Corrected MK values as function of the crossing angle. The blue line shows the MK values estimated from the analytical solution while the red line shows the MK values estimated from the numerical method described in previous posts. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2015/2015_08_24_Rafael.rst.txt b/dipy.org/pull/66/_sources/posts/2015/2015_08_24_Rafael.rst.txt new file mode 100644 index 0000000..83632b3 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2015/2015_08_24_Rafael.rst.txt @@ -0,0 +1,95 @@ +Final Project Report +==================== + +.. post:: August 24 2015 + :author: Rafael Henriques + :tags: google + :category: gsoc + +Hi all! + +The GSoC coding period is now over! + +Participating in the GSoC was an amazing experience. In general, all objectives of my project were accomplished. Now, the scientific and wider image processing community has access to the first open source DKI processing modules. As the results of this project showed (see for example my `post #10 `__ and `post #11 `__), these modules can be used to analyse data from large worldwide collaborative projects such as the Human Connectome Project (HCP). Moreover, I had a great time working with members of my mentoring organization - I learned a lot from them and I will definitely continue contributing to Dipy in the following years. + +Below you can find my final project report. + +Project summary +--------------- + +In sum, this project was organized in 4 main phases: + +1. Finishing the work done on functions to simulate signal from the DKI model +2. Implementing methods for estimating the diffusion kurtosis tensor and derived measures +3. Adding a procedure to estimate biophysical parameters from DKI +4. Developing techniques to estimate fiber directions from real DKI data + +The details of the work done in each phase are described below: + +DKI based simulations +^^^^^^^^^^^^^^^^^^^^^ + +In this part of the project, I implemented the DKI simulations that were important to test the performance of all functions created in the other steps of the project. Part of this work was done before the GSoC coding period and its finalization was reported in the mid-term summary. Just to highlight the relevance of these simulations: during the GSoC coding period, 19 `nose test functions `__ were created, of which 13 were based on DKI simulations. Moreover, DKI simulations were also useful for selecting, optimizing and debugging DKI methods (see for example `post #9 `__ and `post #13 `__). + +DKI reconstruction modules +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As I proposed in my initial project plan, having a final version of the DKI fitting modules and the estimation of diffusion kurtosis statistics was the main goal to achieve for the mid-term evaluation. Since these modules provide the base for the other parts of the project, I decided to dedicate some more time in the second half of the GSoC coding period to improving the diffusion kurtosis statistics functions. These improvements are summarized in the following points: + +- The analytical solutions of the mean and radial kurtosis were validated using two numerical methods (`post #9 `__).
+- The performance of the functions was improved so that all standard kurtosis statistics can be computed within 1 min (`post #10 `__). +- I also explored some of Dipy's pre-processing steps that dramatically improved the quality of the DKI reconstructions (`post #11 `__ and `post #12 `__). +- I added some nosetests to ensure that all code lines of the DKI reconstruction modules were covered by nosetest units. From this, I detected some problems with singularities in the function computing the mean kurtosis, which were solved as reported in `post #13 `__. +- The sample usage script of these modules was adapted to a new DKI dataset which was acquired with similar parameters to the HCP. + +Below we show the kurtosis statistics images obtained from the HCP-like data using the DKI reconstruction modules before (upper panels of Figure 1) and after (lower panels of Figure 1) the improvements made in the second half of the GSoC term. + +.. figure:: /_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_final_post.png + :alt: DKI statistics comparison + :align: center + + Figure 1 - Diffusion kurtosis statistics of the HCP-like data obtained from the implemented DKI reconstructions before (upper panels) and after (lower panels) the optimization done in the second half of the GSoC coding period. The optimized functions seem to correct the artifacts present in white matter regions such as the splenium of the corpus callosum. + +The final version of the DKI modules can be found in the following `pull request `__. + +DKI based biological measures +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Given the extra work done on the previous step, the implementation of the DKI biological measures was rescheduled to the last couple of weeks of the GSoC period. These measures are obtained from the DKI based model proposed by `Fieremans et al., (2011) `__, which allows the estimation of concrete biophysical parameters in brain regions of well aligned fibers. Until the end of the coding period, great advances were made on this module. For example, Figure 2 shows the estimated values of the axonal water fraction (the proportion of water present inside the fibers) for voxels containing well-aligned fibers of the splenium and genu of the corpus callosum, obtained from the current version of this DKI biophysical model. + +.. figure:: /_static/images/gsoc/2015/rafael/AWF_v1.png + :alt: Axonal water fraction + :align: center + + Figure 2 - Axonal water fraction values of the splenium and genu of the corpus callosum (red-yellow colormap values) plotted over the first b-value=0 image of the HCP-like diffusion-weighted dataset. + +Unfortunately, since the final version of these functions depends on the other pull requests that are currently being reviewed, the work on the implementation of the biophysical models was not finalized, and thus it will not be submitted as part of the GSoC code sample. However, I intend to finalize this code soon after the GSoC. If you are interested in looking at the final version of the biophysical metric estimations, stay tuned to the updates at the `DKI reconstructions pull request `__. + +DKI based fiber direction estimation methods +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +As planned in the project proposal, in the second half of the GSoC coding period, I developed a procedure to estimate fiber directions from DKI.
This was done by first estimating an orientation distribution function (ODF) which gives the probability that a fiber direction is aligned to a specific spatial direction (post #9). From the ODF, fiber directions can be estimated by finding the maxima values of the ODF (post #10). In the last couple of weeks, I accomplished a final version of this procedure by writing its sample usage script, where real brain data ODF and fiber directions are estimated. Visualizations of these estimates are shown in Figure 3 and 4. + +.. figure:: /_static/images/gsoc/2015/rafael/dki_odfs_final_post.png + :alt: DKI based ODF + :align: center + + Figure 3 - DKI based orientation distribution function (ODF) computed for voxels of portion of the HCP-like data. + +.. figure:: /_static/images/gsoc/2015/rafael/dki_dirs_final_post.png + :alt: Fiber directions + :align: center + + Figure 4 - Fiber directions computed by detecting the directions of maxima ODF. The multiple direction estimates from some voxels show that DKI is able to resolve crossing fibers. + +The final version of the modules containing the function to estimate fiber directions from DKI can be found in the following pull request. + +Skills gained on GSoC +--------------------- + +- With the supervision of the members of my mentoring organization, I dramatically improved my programming skills. +- I learned all required steps to work on collaborative projects such as Dipy. Particularly, I learned how to share, update and comment my work using Github's development framework. +- I learned how to use ipython notebook to create sample script examples, and using ipython profiling techniques to check and improve function performance. +- Now I know how to use testing units, such as the nosetest units, which allows me to automatically check bugs on the functions that I am implementing. +- I also learned how to improve functions using cython. +- Finally, I got familiarized with Dipy's structure and how to use their functions. This is useful knowledge for my personal future research. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2016/2016_02_01_eleftherios_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2016/2016_02_01_eleftherios_gsoc_announcement.rst.txt new file mode 100644 index 0000000..69285fe --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2016/2016_02_01_eleftherios_gsoc_announcement.rst.txt @@ -0,0 +1,94 @@ +How to become a part of DIPY's Google Summer of Code 2016 +========================================================= + +.. post:: February 01 2016 + :author: Eleftherios Garyfallidis + :tags: google + :category: gsoc announcement + +GSoC is a program that allows students to learn by contributing to an open-source project, while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. + +Before considering becoming part of the Dipy GSoC, please read about our `expectations `_. + +All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. + +Projects +-------- + +1. **Continuous quality assurance (QA) in cloud computing environment** + + Description: The ultimate demonstration of a tool is in its use in realistic and important cases. The analysis of high-quality publicly available data-sets (e.g. 
from the `Human Connectome Project `_) is one compelling case. The goal of this project, is to create a pipeline for analysis of such a data-set, and to reproducibly execute this analysis on a cloud computing resource, as a way to benchmark the tools available through dipy, and perform QA, to detect regressions in the performance of these tools. This will also be a public show-case of the project, as a way to interest new users. + + Difficulty: intermediate. + + Skills required: acquaintance with diffusion MRI, and with dipy. Acquaintance with cloud computing is a plus. + + Mentors: `Ariel Rokem `_ and `Eleftherios Garyfallidis `_. + +2. **CHARMED: biophysical modeling of multi b-value data** + + Description: The `CHARMED model `_ describes the diffusion signal as a combination of hindered and restricted components. This advanced model, when applied to data with multiple b-values, can be used to make inferences about tissue structure and biophysics. The GSoC project will focus on an efficient and well-tested implementation of the CHARMED model in the Dipy reconstruction module. + + Difficulty: intermediate + + Mentors: `Ariel Rokem `_ and Rafael Henriques. + +3. **Develop a new DIPY website with more interactive features (project is full)** + + Description: The current `DIPY `_ website is based on Sphinx and allows for only one documentation to be online (the development version). One of the tasks of this project will be to create a new github repository which will be only for Dipy's website. Right now the website is under the doc folder of the dipy repository. In this new repository a new responsive website will be created which upon other things will allow for hosting documentations for multiple versions. Additionally, the new website will allow for direct insertion of news and connections and updates to social media. Most importantly, new algorithms are expected to be developed that will increase UX. More details soon. + + Difficulty: intermediate + + Skills required: Django, bootstrap, javascript, sphinx and expertise in web development + + Project is full: We had already more than 40 excellent people applying for this project and it will be impossible to interview more of them. **So, this specific project is now closed for new applicants, contacting us after 2nd of March**. Please look and apply to the other exciting projects. + + Mentors: `Jean-Christophe Houde `_ and `Eleftherios Garyfallidis `_ + +4. **DKI enhancements** + + Description: diffusion kurtosis imaging (DKI) is an extension of the classic DTI model. In the previous GSoC, `Rafael Henriques implemented the DKI model fitting and estimation `_. This project proposes to extend our current implementation of diffusion kurtosis with a few different improvements. The first extension will allow us to estimate additional parameters of white matter "integrity" based on the diffusion kurtosis model (see `Fieremans et al. paper `_). The second extension will allow us to use the DKI model for tractography (see `tractography paper `_). Finally, we will also implement the REKINDLE algorithm, which allows robust fitting of DKI parameters (see `REKINDLE paper `_). + + Difficulty: high -- knowledge in diffusion MRI preferred + + Mentors: `Ariel Rokem `_ and Rafael Henriques + +5. **IVIM: Simultaneous modeling of perfusion and diffusion** + + Description: The IVIM model uniquely describes the diffusion and perfusion from data with multiple b-values (see `Le Bihan et al. paper `_ or `Luciani et al. paper `_). 
It has been used to investigate brain disease, stroke, aging, and liver fibrosis among other medical and neuroscience applications. This project proposes porting a `previous implementation of IVIM processing by Eric Peterson `_ into Dipy. Further extensions would be to implement the Jacobian for speed improvements in the nonlinear fitting and improvements in the fitting algorithm to improve robustness. + + Difficulty: intermediate + + Mentors: `Ariel Rokem `_, Eric Peterson and `Rafael Henriques `_. + +6. **Scifi UI using Python-VTK in DIPY.VIZ** + + Description: The main idea will be to develop new futuristic widgets directly using VTK (Visualization Toolkit) without calling any external libraries. So, no Qt! Only VTK which is written already in OpenGL. Here are some recent tutorials to have a look http://dipy.org/examples_index.html#id15 and start playing with. + Those new widgets are useful because we want to use them to navigate in tractographies and allow neurosurgeons and other neuroscientists to have a unique impression and user experience when using our tools. We also want to be lightweight and as multiplaform as possible. + Have you watched Guardians of the Galaxy? We want to create with this project the very basic tools so that in some years, we can do something like that https://vimeo.com/103533906 but of course applied for tractography exploration not for space travelling. For example, some of the tasks will be to develop a filedialogue, a sliding panel with buttons and add dynamic actor menus (3D menus on objects - like in games). + + Difficulty: high + + Skills required: Python, OpenGL and VTK + + Mentors: `Marc-Alexandre Côté `_ and `Eleftherios Garyfallidis `_ + +7. **Automatic denoising and robust brain extraction** + + Description: Create a method for automatic denoising of diffusion MRI and structural MR datasets. Currently we need to estimate the noise of the signal which is often a bit troublesome. Local PCA will be the main method to try to implement in this project but the harder task will be to do so efficiently in Python/Cython without extra dependencies. After implementing this method, the next task will be to create a more robust brain extraction method from what is currently implemented in DIPY. For this task the student will have to think of his own strategies and take decisions on which methods to combine or implement to do so. + + Difficulty: high + + Skills required: Python, Numpy, diffusion MRI, signal processing, DIPY + + Mentors: `Eleftherios Garyfallidis `_, Omar Ocegueda, `Julio Villalon `_ and `Rafael Henriques `_. + +8. **Eddy current correction** + + Description: Eddy currents are artifacts that affect diffusion MRI measurements. A common preprocessing step is to correct for these artifacts. In this project, we will implement a `popular algorithm for eddy current correction `_. + + Difficulty: moderate + + Skills required: Familiarity with diffusion MRI, numpy, scipy. + + Mentors: `Ariel Rokem `_ and Bob Dougherty. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2016/2016_05_16_Bishakh.rst.txt b/dipy.org/pull/66/_sources/posts/2016/2016_05_16_Bishakh.rst.txt new file mode 100644 index 0000000..5bc8ba8 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2016/2016_05_16_Bishakh.rst.txt @@ -0,0 +1,29 @@ +Google Summer of Code with Dipy +=============================== + +.. 
post:: May 16 2016 + :author: Bishakh Ghosh + :tags: google + :category: gsoc + +I know I am a bit late with this blogpost, but as you probably guessed from the title I made it into Google Summer of Code 2016!! +Throughout this summer I will be working with DIPY under the Python Software Foundation. + +So how did I make it? +--------------------- + +To be frank, although I dreamt of getting into GSOC from 10th standard I never tried it wholeheartedly before. And it was partly because I did not know how and where to start. But this time I was determined and more familiar with different open source projects and I started early getting involved with the community. After trying many organizations I finally found one where I could contribute something, be it tiny code cleanups or small enhancements. And trust me it feels just amazing when your first patch (`pull request `__) gets merged into the master branch! Then I selected a project in this organization, prepared an application and in the whole process my mentors helped me a lot with their valuable suggestions. And after that here I am! :) + +Project Overview +---------------- + +The aim of my project is to develop a new website for Dipy from scratch with a custom content management system and admin functionality for maintenance. Another key feature of the website will be continuous generation of documentation from the dipy repository and linking with the website. This means that whenever a new version of dipy will be released the website will be automatically updated with the new documentation. Some other features include a visualization of web analytics and github data to showcase the fact that the dipy project is spreading worldwide and a tool to generate documentation of command line utilities. + +The backend of the website will be built using django, and some other python libraries like `markdown `_ and `python-social-auth `_. For visualization I plan to use `D3js `_ library. For me the most challenging and interesting part of the project will be continuous generation of documentation. There can be many ways this can be achieved. For now we have thought of a process in which for every commit or release a build server will be triggered which will build the documentation using sphinx and this documentation will then be uploaded to the website. In this process the documentation of the command line utilities will also have to be generated and that is a challenge of its own. + +Community Bonding Period +------------------------ + +This part of the Google Summer of Code (April 23, 2016 - May 22, 2016) is called Community Bonding Period and I am discussing and refining the ideas with my mentors. We have weekly meetings and frequent communication through email and gitter. I have also set up my development environment and getting ready to start work. Although I have developed several small projects using `django `_ for my college and clubs I have never tried anything of this scale. So I am learning about the different challenges of deployment, security and scalability. I am trying to get familiar with the best practices and design patterns of django and learning how to test my code. + +Hope to have an amazing summer! 
:) \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2016/2016_06_10_Bishakh.rst.txt b/dipy.org/pull/66/_sources/posts/2016/2016_06_10_Bishakh.rst.txt new file mode 100644 index 0000000..d3f6dcd --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2016/2016_06_10_Bishakh.rst.txt @@ -0,0 +1,96 @@ +Google Summer of Code Progress June 10 +====================================== + +.. post:: June 10 2016 + :author: Bishakh Ghosh + :tags: google + :category: gsoc + +It has been about 20 days since the coding period has begun. I have made some decent progress with the backend of the Dipy website. +The target that was set according to the timeline of my proposal was setting up an authentication system and login with GitHub in Django along with custom admin panel views for content management. + +For now, the new Dipy website is hosted temporarily at http://dipy.herokuapp.com/ for testing purposes. The login system and the content management system are almost complete. I have already started designing the frontend. The corresponding code can be found in this `pull request `__. + +Details of Backend Developed So Far +----------------------------------- + +For login with GitHub and Google Plus, I have used `python-social-auth `_. After a user logs in, their content editing permission is determined by checking if they have 'push' permission in the dipy_web repository on GitHub. + +This is done by fetching repository information from GitHub API with the user's access token: + +.. code-block:: none + + GET https://api.github.com/orgs/:org/repos + +The response contains permission information like: + +.. code-block:: json + + "permissions": { + "admin": false, + "push": false, + "pull": true + } + +So if a user has ``push:true`` permission, then they have push access to the dipy_web repository and that user is granted permission to edit the content of the website. + +Now there are several types of content and each type has its own model: + +1. Website Sections: The static website sections that are positioned in different pages. +2. News Posts +3. Publications + +Website Sections +^^^^^^^^^^^^^^^^ + +The website sections contain some identifiers like which page it belongs to and in which position it should be placed. The content body of the website section is written in markdown. To change the markdown to HTML, the `markdown `_ library is used. The model's ``save()`` method is overridden so that each time it is edited, the new HTML is generated from the markdown. The HTML is filtered using the `bleach `_ library. + +.. code-block:: python + + def save(self, *args, **kwargs): + html_content = markdown.markdown(self.body_markdown, + extensions=['codehilite']) + print(html_content) + # bleach is used to filter html tags like """ % (tracking_id,) + return {'google_analytics': tracking_code} + +What's next +----------- + +* We have to add more documentation versions (the older ones) and add a hover button in the documentation pages to hop from one documentation version to another just like the django documentations. +* We have to design a gallery page that will contain images, videos and tutorials. +* I am currently working on a github data visualization page for visualization of dipy contributors and activity in the dipy repository. + +Will be back with more updates soon! 
:)
\ No newline at end of file
diff --git a/dipy.org/pull/66/_sources/posts/2016/2016_08_07_Bishakh.rst.txt b/dipy.org/pull/66/_sources/posts/2016/2016_08_07_Bishakh.rst.txt
new file mode 100644
index 0000000..cccb38b
--- /dev/null
+++ b/dipy.org/pull/66/_sources/posts/2016/2016_08_07_Bishakh.rst.txt
@@ -0,0 +1,205 @@
+Google Summer of Code Progress August 7
+=======================================
+
+.. post:: August 07 2016
+   :author: Bishakh Ghosh
+   :tags: google
+   :category: gsoc
+
+Yay! We have dynamically generated gallery and tutorials pages now!
+
+Progress so far
+---------------
+
+The major changes are in the gallery and in the new tutorials page.
+
+Gallery Page
+^^^^^^^^^^^^
+
+Instead of showing the manually entered images from the admin panel, the gallery now fetches all images from all the tutorials in the latest documentation.
+
+This is actually done by scraping the tutorials page from the json docs.
+
+Although the docs are now built in json format, the body is still represented as an HTML string. As a result there was no way out other than parsing the HTML. And the best HTML parsing library that I know of is Beautiful Soup.
+
+.. code-block:: python
+
+    def get_doc_examples_images():
+        """
+        Fetch all images in all examples in latest documentation
+        """
+        doc = DocumentationLink.objects.filter(displayed=True)[0]
+        version = doc.version
+        path = 'examples_index'
+        repo_info = (settings.DOCUMENTATION_REPO_OWNER,
+                     settings.DOCUMENTATION_REPO_NAME)
+        base_url = "http://%s.github.io/%s/" % repo_info
+        url = base_url + version + "/" + path + ".fjson"
+        response = requests.get(url)
+        if response.status_code == 404:
+            url = base_url + version + "/" + path + "/index.fjson"
+            response = requests.get(url)
+            if response.status_code == 404:
+                return []
+        url_dir = url
+        if url_dir[-1] != "/":
+            url_dir += "/"
+
+        # parse the content to json
+        response_json = response.json()
+        bs_doc = BeautifulSoup(response_json['body'], 'html.parser')
+        all_links = bs_doc.find_all('a')
+
+        examples_list = []
+        for link in all_links:
+            if(link.get('href').startswith('../examples_built')):
+                rel_url = "/".join(link.get('href')[3:].split("/")[:-1])
+                example_url = base_url + version + "/" + rel_url + ".fjson"
+                example_response = requests.get(example_url)
+                example_json = example_response.json()
+                example_title = strip_tags(example_json['title'])
+
+                # replace relative image links with absolute links
+                example_json['body'] = example_json['body'].replace(
+                    "src=\"../", "src=\"" + url_dir)
+
+                # extract title and all images
+                example_bs_doc = BeautifulSoup(example_json['body'], 'html.parser')
+                example_dict = {}
+                example_dict['title'] = example_title
+                example_dict['link'] = '/documentation/' + version + "/" + path + "/" + link.get('href')
+                example_dict['description'] = example_bs_doc.p.text
+                example_dict['images'] = []
+                for tag in list(example_bs_doc.find_all('img')):
+                    example_dict['images'].append(str(tag))
+                examples_list.append(example_dict)
+        return examples_list
+
+And all the extracted images are displayed in the honeycomb gallery.
+
+.. figure:: /_static/images/gsoc/2016/bishakh/dipy12.jpeg
+   :alt: dipy gallery page
+   :align: center
+
+   Dipy gallery page
+
+Tutorials Page
+^^^^^^^^^^^^^^
+
+Although each version of the documentation has its own list of tutorials, we wanted a dedicated page which will contain the tutorials with thumbnails and descriptions, and they will be grouped into several sections.
So similar to the gallery page I parsed the tutorials index page and went into each tutorial and fetched the thumbnails and descriptions. Then this list of tutorials is displayed as an expandable list of groups. + +.. code-block:: python + + def get_examples_list_from_li_tags(base_url, version, path, li_tags): + """ + Fetch example title, description and images from a list of li tags + containing links to the examples + """ + + examples_list = [] + url_dir = base_url + version + "/" + path + ".fjson/" + + for li in li_tags: + link = li.find("a") + if(link.get('href').startswith('../examples_built')): + example_dict = {} + # get images + rel_url = "/".join(link.get('href')[3:].split("/")[:-1]) + example_url = base_url + version + "/" + rel_url + ".fjson" + example_response = requests.get(example_url) + example_json = example_response.json() + example_title = strip_tags(example_json['title']) + + # replace relative image links with absolute links + example_json['body'] = example_json['body'].replace( + "src=\"../", "src=\"" + url_dir) + + # extract title and all images + example_bs_doc = BeautifulSoup(example_json['body'], 'html.parser') + example_dict = {} + example_dict['title'] = example_title + example_dict['link'] = '/documentation/' + version + "/" + path + "/" + link.get('href') + example_dict['description'] = example_bs_doc.p.text + example_dict['images'] = [] + for tag in list(example_bs_doc.find_all('img')): + example_dict['images'].append(str(tag)) + examples_list.append(example_dict) + return examples_list + + + def get_doc_examples(): + """ + Fetch all examples (tutorials) in latest documentation + """ + doc_examples = [] + doc = DocumentationLink.objects.filter(displayed=True)[0] + version = doc.version + path = 'examples_index' + repo_info = (settings.DOCUMENTATION_REPO_OWNER, + settings.DOCUMENTATION_REPO_NAME) + base_url = "http://%s.github.io/%s/" % repo_info + url = base_url + version + "/" + path + ".fjson" + response = requests.get(url) + if response.status_code == 404: + url = base_url + version + "/" + path + "/index.fjson" + response = requests.get(url) + if response.status_code == 404: + return [] + url_dir = url + if url_dir[-1] != "/": + url_dir += "/" + + # parse the content to json + response_json = response.json() + bs_doc = BeautifulSoup(response_json['body'], 'html.parser') + + examples_div = bs_doc.find("div", id="examples") + all_major_sections = examples_div.find_all("div", + class_="section", + recursive=False) + + for major_section in all_major_sections: + major_section_dict = {} + major_section_title = major_section.find("h2") + major_section_dict["title"] = str(major_section_title) + major_section_dict["minor_sections"] = [] + major_section_dict["examples_list"] = [] + all_minor_sections = major_section.find_all("div", + class_="section", + recursive=False) + + if len(all_minor_sections) == 0: + # no minor sections, only examples_list + all_li = major_section.find("ul").find_all("li") + major_section_dict[ + "examples_list"] = get_examples_list_from_li_tags(base_url, + version, + path, + all_li) + else: + for minor_section in all_minor_sections: + minor_section_dict = {} + minor_section_title = minor_section.find("h3") + minor_section_dict["title"] = str(minor_section_title) + minor_section_dict["examples_list"] = [] + + all_li = minor_section.find("ul").find_all("li") + minor_section_dict[ + "examples_list"] = get_examples_list_from_li_tags(base_url, + version, + path, + all_li) + major_section_dict["minor_sections"].append(minor_section_dict) + 
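+            # each major_section_dict now holds its title plus either a flat
+            # examples_list (when there are no minor sections) or a list of
+            # minor_sections, each carrying its own examples_list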
doc_examples.append(major_section_dict)
+        return doc_examples
+
+.. figure:: /_static/images/gsoc/2016/bishakh/dipy13.png
+   :alt: dipy tutorials page
+   :align: center
+
+   Dipy tutorials page
+
+What next?
+----------
+
+The GitHub statistics visualization page is one major task. Another major task is to somehow make the automatically generated gallery and tutorials pages editable so that we can change the thumbnails or descriptions. Also, the coding period is about to end in 2 weeks, so documenting the code and merging all pull requests is a priority.
\ No newline at end of file
diff --git a/dipy.org/pull/66/_sources/posts/2016/2016_08_19_Bishakh.rst.txt b/dipy.org/pull/66/_sources/posts/2016/2016_08_19_Bishakh.rst.txt
new file mode 100644
index 0000000..4dd025a
--- /dev/null
+++ b/dipy.org/pull/66/_sources/posts/2016/2016_08_19_Bishakh.rst.txt
@@ -0,0 +1,42 @@
+Google Summer of Code Progress August 19
+========================================
+
+.. post:: August 19 2016
+   :author: Bishakh Ghosh
+   :tags: google
+   :category: gsoc
+
+So we are at the end of this awesome summer, and this post is about the progress in my final weeks of GSoC 2016! The major addition in this period is the development stats visualization page.
+
+GitHub stats visualization
+--------------------------
+
+As we had planned, the new Dipy website needed a page to highlight the growing number of developers and their contributions to the Dipy project. And finally we have achieved that with a `separate django app `_ that creates visualizations with data pulled from the GitHub API, and for drawing some neat graphs I have used the `Chart.js `_ library.
+
+.. figure:: /_static/images/gsoc/2016/bishakh/dipy14.jpg
+   :alt: dipy github visualization page
+   :align: center
+
+   Dipy GitHub visualization page
+
+And hey it's a separate django app!
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+So it can be integrated easily into any other django project! Simply copy the ``github_visualization`` folder into your project and add ``github_visualization`` to the ``INSTALLED_APPS`` list in ``settings.py``.
+
+Now you just need to add a couple of lines to the template in which you want to show the visualizations.
+
+.. code-block:: html
+
+    {% load github_stats_tags %}
+
+    {% include "github_visualization/github_stats_includes.html" %}
+
+    {% github_stats_block "username" "repository_name" %}
+
+Just change the 'username' and 'repository_name' to point to the GitHub repository you want to see visualizations for.
+
+The work was submitted through the pull request `#15 `_.
\ No newline at end of file
diff --git a/dipy.org/pull/66/_sources/posts/2018/2018_01_24_eleftherios_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2018/2018_01_24_eleftherios_gsoc_announcement.rst.txt
new file mode 100644
index 0000000..4bae541
--- /dev/null
+++ b/dipy.org/pull/66/_sources/posts/2018/2018_01_24_eleftherios_gsoc_announcement.rst.txt
@@ -0,0 +1,157 @@
+Google Summer of Code 2018
+==========================
+
+.. post:: January 24 2018
+   :author: Eleftherios Garyfallidis
+   :tags: google
+   :category: gsoc announcement
+
+Introduction to DIPY
+====================
+
+DIPY is a free and open source software library for computational neuroanatomy and medical data science.
DIPY contains algorithms for diffusion magnetic resonance imaging (dMRI) analysis and tractography, but also contains implementations of other computational imaging methods, such as denoising and registration, that are applicable to the greater medical imaging and image processing communities. Additionally, DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 3rd time under the umbrella of the Python Software Foundation (PSF).
+
+How to become a part of DIPY's Google Summer of Code 2018
+=========================================================
+
+GSoC is a program that allows students to learn by contributing to an open-source project, while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_.
+
+Before considering becoming part of the DIPY GSoC, please read about our `expectations `_.
+
+All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz.
+However, you should already be familiar with data analysis using Python and Numpy before applying.
+
+Feel free to ask questions directly in our Gitter channel https://gitter.im/nipy/dipy
+
+Advice
+------
+
+Potential candidates should take a look at the guidelines on `how to contribute to DIPY `_. Making a small enhancement/bugfix/documentation fix/etc. to DIPY before applying for the GSoC is a requirement from the PSF; it can help you get some idea of how things would work during the GSoC. The fix does not need to be related to your proposal. We have and will continue adding some beginner-friendly issues on GitHub.
+
+Projects
+========
+
+1. **DIPY workflows and Quality Assurance**
+
+   Description: Create new dipy.workflows and make them executable on different platforms. DIPY has a unique system that allows creating command line interfaces in a systematic and precise way to run across platforms. DIPY uses existing technology such as the default argument parser of Python, but enhances the parser using a software engineering process called introspection. Our IntrospectiveParser allows generating workflows that can be executed both from the command line and from Python scripts. In this work, you will have to:
+
+   * Take existing tutorials and generate new workflows from them. Test the workflows with new data and generate automated reports.
+
+   * Help with simplifying installation on the different operating systems.
+
+   Difficulty: easy to intermediate
+
+   Skills required: Numpy, Python, pyinstaller (or similar), medical imaging.
+
+   Mentors: `Serge Koudoro `_ and `Eleftherios Garyfallidis `_
+
+2. **Extend Visualization - Focus in UI**
+
+   Description: In this project you will build scifi-like 3D and 2D user interfaces inspired by Guardians of the Galaxy `video `_. Dipy.viz provides many visualization capabilities. However, we were not happy with the interactive capabilities found in existing GUIs. For this reason we built our own UI engine. No Qt! Everything is integrated into the VTK scene. See the example below, which was generated during our 2016 GSoC participation. This is an example of an orbital menu.
+
+   ..
image:: http://i.giphy.com/b0pJ7djNSIWFa.gif
+      :align: center
+
+   In this project you will extend this work and add more futuristic widgets. The motto of this project is to make everything interactive without performance issues. See also the figure of Project 5.
+
+   Difficulty: intermediate
+
+   Skills required: Python, OpenGL and VTK
+
+   Mentors: `David Reagan `_, `Ranveer Aggarwal `_ and `Eleftherios Garyfallidis `_
+
+3. **Improve performance of nonrigid image registration**
+
+   Description: We have some really nice code for nonrigid registration that needs to be parallelized. The code is written in Python and Cython. You will need to work primarily on adding multithreading (OpenMP) capabilities in our Symmetric Normalization framework. Start by playing with the following tutorials
+
+   https://github.com/nipy/dipy/blob/master/doc/examples/affine_registration_3d.py
+   https://github.com/nipy/dipy/blob/master/doc/examples/syn_registration_2d.py
+   https://github.com/nipy/dipy/blob/master/doc/examples/syn_registration_3d.py
+
+   Difficulty: intermediate
+
+   Skills required: Familiarity with OpenMP, Cython, Python, Numpy.
+
+   Mentors: `Serge Koudoro `_ and `Eleftherios Garyfallidis `_
+
+4. **Extend Clustering Framework**
+
+   Description: QuickBundles and QuickBundlesX are extremely fast algorithms that can be used in a series of fields and datasets. We initially used these algorithms to cluster streamlines. Your job will be to extend our existing framework to new datasets. For example, implement new metrics that allow clustering surfaces, images, text or other data. Also, you will have to work on a research component of the algorithm that is related to reducing the number of clusters in dense datasets.
+
+   Difficulty: intermediate
+
+   Skills required: Python/Cython, machine learning, especially unsupervised learning. Knowledge of scikit-learn is an advantage.
+
+   Mentors: `Eleftherios Garyfallidis `_ and `Serge Koudoro `_
+
+5. **Extend Visualization - Focus in GLSL**
+
+   Description: Our new visualization engine supports the GLSL shading language. Join our effort to build stunning visualizations of brain images and other scientific datasets. You will have to program vertex and fragment shaders to generate different effects on VTK polydata. For examples, see the code `here `_. Here is an example without shaders
+
+   .. image:: https://media.giphy.com/media/l49JEQvtIForHhvlC/giphy.gif
+      :width: 600
+      :height: 400
+      :align: center
+
+   You will have to update the code to enable shading when needed and if supported by the current computing system. Please also check the tutorials starting with viz here
+
+   https://github.com/nipy/dipy/tree/master/doc/examples
+
+   Difficulty: high
+
+   Skills required: GLSL, Python, OpenGL and VTK
+
+   Mentors: `David Reagan `_, `Ranveer Aggarwal `_ and `Eleftherios Garyfallidis `_
+
+6. **Implement new models for microstructure imaging**
+
+   Description: This is a model fitting project. You will be required to extend our new microstructure framework. You will be able to implement models such as Multi Tensor, NODDI, Axcaliber, CHARMED, Ball & Sticks, Ball & Rackets, all with three crossings, and also all the combinations of Zeppelin, Cylinder, Dot and Ball compartments. See the MRI example below. How would you model these tiny structures?
+
+   ..
image:: https://media3.giphy.com/media/4F2eiACLhUaZy/giphy.gif + :width: 460 + :height: 460 + :align: center + + Difficulty: high + + Skills required: MSc or PhD level, mathematical optimization, Python, Numpy, Cython (bonus) + + Mentors: `Maryam Afzali `_, `Mauro Zucchelli `_ and `Eleftherios Garyfallidis `_ + +7. **Extend and QA tracking framework** + + Description: Tractography is one of the great challenges in medical imaging. In DIPY we have implemented + different tracking algorithms including deterministic, probabilistic and particle filtering algorithms. You + will have to extend dipy.tracking with machine learning based algorithms. Also you will need to test your + algorithm with different datasets of different resolutions. + + Difficulty: high + + Skills required: Python/Cython, knowledge of tractography. Available only for MSc or PhD students. + + Mentor: `Eleftherios Garyfallidis `_, `Serge Koudoro `_, `Gabriel Girard `_ and `Ariel Rokem `_. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2020/2020_02_05_serge_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2020/2020_02_05_serge_gsoc_announcement.rst.txt new file mode 100644 index 0000000..714cf72 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2020/2020_02_05_serge_gsoc_announcement.rst.txt @@ -0,0 +1,78 @@ +Google Summer of Code 2020 +========================== + +.. post:: February 05 2020 + :author: Serge Koudoro + :tags: google + :category: gsoc announcement + +Introduction to DIPY +==================== + +DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from computational neuroanatomy and medical data science field. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 4th time. + +How to become a part of DIPY's Google Summer of Code 2020 +========================================================= + +GSoC is a program that allows students to learn by contributing to an open-source project while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. This year, DIPY is participating in GSoC under the umbrella of International Neuroinformatics Coordinating Facility (`INCF `_). More information can be found at INCF's `GSoC 2020 page `_. + +Before considering becoming part of the DIPY GSoC, please read about our expectations. + +All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. However, you should be already familiar with data analysis using Python and Numpy before applying. + +Be happy to ask questions directly in our Gitter channel https://gitter.im/nipy/dipy + +Advice +------ + +Potential candidates should take a look at the guidelines on how to `contribute to DIPY `_. 
Making a small enhancement/bugfix/documentation fix/etc. to DIPY before applying for the GSoC can help you get some idea of how things would work during the GSoC. The fix does not need to be related to your proposal. We have and will continue adding some beginner-friendly issues on GitHub. You can see some of them `here `_.
+
+Projects
+========
+
+DIPY support for DICOM files
+----------------------------
+
+**Description:** Magnetic resonance imaging (MRI) is often stored in Digital Imaging and Communications in Medicine (DICOM) format. We want to provide DIPY users with support for reading and writing DICOM files, and also an additional option to convert from DICOM to the NIFTI file format.
+
+* Understand how the DICOM file format works.
+* Add support in DIPY to read and write DICOM files.
+* Understand and create a new DIPY command-line interface (Workflow).
+* Implement a robust DICOM to NIFTI conversion method.
+* (Optional) Create a workflow to connect and get data from a PACS server.
+
+**Difficulty:** Intermediate
+
+**Skills required:** Python/Cython, Medical Imaging.
+
+**Mentors:** `Bramsh Chandio `_ and `Eleftherios Garyfallidis `_
+
+Machine learning-based MRI registration
+---------------------------------------
+
+**Description:** Develop a machine learning-based registration framework that makes use of the Diffeomorphic Registration implemented in DIPY. The goal is to train a CNN model that can compute the deformation field in an unsupervised setting. This project will also involve leveraging the reconstruction module in DIPY to perform image fusion via inter-modality registration.
+
+* Understand Diffeomorphic Registration. Look at the ``dipy.align`` module and the DIPY tutorial.
+* Understand advanced Image Reconstruction Models such as Free-Water-DTI, DKI, etc. in DIPY
+* Implement a Deep Neural Net (e.g. a 2D/3D CNN) or a Multivariate Model for Co-learning
+
+**Difficulty:** High
+
+**Skills required:** Python, Deep Learning, Tensorflow, Registration, Strong Math Skills.
+
+**Mentors:** `Shreyas Fadnavis `_ and `Bramsh Chandio `_
+
+Extend DIPY Horizon workflow
+----------------------------
+
+**Description:** Extend the ``dipy_horizon`` workflow by adding more options for the visualization of diffusion data. DIPY Horizon is a workflow that makes it possible to visualize diffusion data such as dMRI, tractograms, white matter bundles and more from the command line. This project requires the student to add support for different types of visualizations in the horizon workflow.
+
+* Add support to visualize orientation distribution functions (odfs) generated from diffusion data
+* Create an option in the horizon workflow to project anatomical measures such as fractional anisotropy (FA), mean diffusivity (MD), etc. on the white matter tracts and visualize them
+* Add Qt functionality to the dipy_horizon workflow
+
+**Difficulty:** Easy / Intermediate
+
+**Skills required:** Python, VTK, Qt.
+
+**Mentors:** `Bramsh Chandio `_ and `Eleftherios Garyfallidis `_
\ No newline at end of file
diff --git a/dipy.org/pull/66/_sources/posts/2020/2020_05_04_serge_gsod_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2020/2020_05_04_serge_gsod_announcement.rst.txt
new file mode 100644
index 0000000..6aa68f3
--- /dev/null
+++ b/dipy.org/pull/66/_sources/posts/2020/2020_05_04_serge_gsod_announcement.rst.txt
@@ -0,0 +1,102 @@
+Google Season of Docs 2020
+==========================
+
+..
post:: May 04 2020 + :author: Serge Koudoro + :tags: google + :category: gsod announcement + +DIPY + FURY project ideas for GSoD'20 +===================================== + +Welcome, and thank you for showing your interest in `DIPY(Diffusion Imaging in Python) `_ and `FURY(Free Unified Rendering in Python) `_ projects! On this page, we will first provide more information about DIPY and FURY, then the current state of both projects' documentation, and we will finish by describing two project ideas in detail. We want to point out that you are more than welcome to bring your ideas; we'd love to discuss with you any ideas that improve our online presence or documentation. + +**Please note that** `the Google Season of Docs `_ **is a program for writers with previous experience to show for the application. If you are a student, please consider** `Google Summer of Code `_ **instead.** + +Technical Writers +----------------- + +Welcome technical writers! Do not hesitate to contact us as early as possible! You can use any project chat channels for your first contact (see links on the documentation section). Also, you can find more information on the following links: + +- `Guidelines `_ +- `Application Timeline `_ +- `How to write a good proposal `_ + +What are DIPY and FURY? +----------------------- + +**About DIPY**: DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from the computational neuroanatomy and medical data science fields. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging + +**About FURY**: FURY is a free and open-source software library for scientific visualization and 3D animations. FURY contains many tools for visualizing a series of scientific data including graph and imaging data. FURY is a DIPY spin-off. 
+ +About our Documentation +----------------------- + ++-------------------+------------------------------------------------+------------------------------------------------+ +| | DIPY | FURY | ++===================+================================================+================================================+ +| Latest version | https://dipy.org | https://fury.gl | ++-------------------+------------------------------------------------+------------------------------------------------+ +| tutorials | https://dipy.org/tutorials/ | http://fury.gl/latest/auto_tutorials/index.html| ++-------------------+------------------------------------------------+------------------------------------------------+ +| reference guide | https://dipy.org/documentation/latest/reference| http://fury.gl/latest/reference/index.html | ++-------------------+------------------------------------------------+------------------------------------------------+ +| developer docs | https://dipy.org/documentation/latest/devel/ | http://fury.gl/latest/symlink/contributing.html| ++-------------------+------------------------------------------------+------------------------------------------------+ +| GitHub repository | https://github.com/dipy/dipy | https://github.com/fury-gl/fury | ++-------------------+------------------------------------------------+------------------------------------------------+ + +**The state of DIPY documentation**: We have complete reference documentation for most of the functions and classes exposed to users, although most of the functions are missing a usage example. Our User and Developer Guide needs to be updated and be made more consistent. Also, we need to create the documentation of our new CLI feature. Overall, offering a better experience will be valuable for our users! + +**The state of FURY documentation**: FURY is a recent project that offers flexibility. We have complete reference documentation for most functions, and an increasing number of tutorials have been created but FURY suffers from a lack of a Developer and User Guides. + +**DIPY + FURY documentation generation**: All our documentation and websites are built with `Sphinx `_. Sphinx generates static websites (making them easy to deploy) and provides extensive functionality to transform plain-text *reStructuredText* documents to HTML, as well as extract and cross-link documentation automatically from docstrings in Python source code. Reference documentation follows the `NumPy docstring standard `_. A detailed guide on how to document functions, classes, and other objects can be found `here `_. + +Note for DIPY: A Django instance manages dynamic content and loads the static page generated by Sphinx. + +**DIPY + FURY approach to documentation work**: Documentation tasks and issues are maintained on our GitHub issue tracker for `DIPY `_ and `FURY `_. Changes to the documentation are made via pull requests on GitHub, and reviewed with our standard review process which is the same for documentation and code (see `DIPY contributing guide `_ or `FURY contributing guide `_). Any new feature should be documented and followed by a tutorial. There is no dedicated "documentation manager" so every developer can review, improve, or comment on the improvement. + +Contact +------- + +As a community-driven project we try to have all conversations about DIPY and FURY in public: + +- All discussions related to the *development* of DIPY (which includes GSoD) can occur on the `DIPY mailing list `_. 
Please register and post to that list for discussing a GSoD proposal or idea. Also, you can use `our chat on gitter `_. +- All discussions related to the *development* of FURY (which includes GSoD) can occur on the `FURY mailing list `_. Please register and post to that list for discussing a GSoD proposal or idea. Also, you can use `our chat on Discord `_. + +Projects +-------- + +- **Project Idea 1**: *[DIPY] High-level restructuring and end-user focus.* +- **Project Idea 2**: *[FURY] Create User and Developer Guides. Update missing docstrings/doctests* + +Projects in Details: +==================== + +Project idea 1: High-level restructuring and end-user focus +----------------------------------------------------------- + +**Potential Mentors**: Ariel Rokem, Eleftherios Garyfallidis, Jon Haitz Legarreta Gorroño + +**Description**: DIPY serves many kinds of users: students that have a first time contact with the neuroimaging field, educators, researchers, medical doctors, software developers. In summary, DIPY's 10 year documentation needs to be reshaped and improved. We want to provide ways to guide those users to the parts of the documentation most relevant to them. We would love to work with a technical writer that can help us address this challenge. + +**Possible topics** include: + +- Improving the structure and content of https://dipy.org/ +- Reviewing and improving the structure of the documentation +- Producing a roadmap or list of work items for engaging the community in further documentation work +- Rewriting the User and Developer Guides. +- Adding non-textual images (illustrations, animations, graphics) to enhance the textual explanations +- Improving consistency across the documentation. +- Create documentation for the new command-line interface. + +Project idea 2: [FURY] Create User and Developer Guides. Update missing docstrings/doctests +------------------------------------------------------------------------------------------- + +**Potential Mentors**: Serge Koudoro, Eleftherios Garyfallidis + +This project will be split into 2 parts: + +**Create User and Developer Guides**: Despite a lot of tutorials and demos, FURY is missing a User and Developer Guides to explain some basic concepts of our library. What is a vertex? What is a primitive? The technical writer will have to work closely with the core team to provide a boost to our current documentation and facilitate user learning and early adoption. + +**Missing docstrings/doctests**: Every public function in FURY should have a docstring with examples (doctests). At present few of them have an example on the docstring. The part of this project would consist of identifying which functions are missing documentation and adding them. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2020/2020_12_05_Areesha.rst.txt b/dipy.org/pull/66/_sources/posts/2020/2020_12_05_Areesha.rst.txt new file mode 100644 index 0000000..0a9e985 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2020/2020_12_05_Areesha.rst.txt @@ -0,0 +1,149 @@ +GSoD: High-level restructuring and end-user focus +================================================= + +.. post:: December 05 2020 + :author: Areesha Tariq + :tags: google + :category: gsod + +Organization: DIPY +------------------ + +DIPY is the paragon 3D/4D+ imaging library in Python. Contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. 
Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. + +Mentors: +-------- + +- `Serge Koudoro `__ +- `Jon Haitz Legarreta Gorroño `__ +- `Eleftherios Garyfallidis `__ +- `Ariel Rokem `__ + +Technical Writer: +----------------- + +- `Areesha Tariq `__ + +Short Description of the Work Done: +----------------------------------- + +During Google Season of Docs, I worked with my mentors to create documentation for the new command-line interface (CLI) workflows available in DIPY. I created documentation to show how these CLI workflows can be used to process diffusion-weighted Magnetic Resonance Imaging (MRI) data with the following purposes: + +- Fetching datasets +- Denoising +- Registration +- Gibbs unringing +- Local fiber orientation reconstruction +- Tracking + +I worked on improving the consistency across the documentation to make it easier to understand and maintain. I made some small improvements and minor fixes as well. I worked on the homepage of DIPY by updating a section with non-textual images. + +Documentation that was developed: +--------------------------------- + +Command-Line Interfaces: +^^^^^^^^^^^^^^^^^^^^^^^^ + +DIPY's command-line interfaces provide a terminal-friendly interface to a set of complete, end-to-end DIPY Python-based workflows. The philosophy behind CLI-exposed workflows is to provide users with a means to exploit the capabilities of DIPY using the terminal, without requiring any additional coding, and completing all necessary steps from the input volumes to the result with a single command. + +I documented command-line interfaces (CLI) for multiple workflows and their methods: + +1. Fetching datasets + https://github.com/dipy/dipy/pull/2248 + Tutorial to view and fetch the datasets available in DIPY. + +2. Denoising + https://github.com/dipy/dipy/pull/2244 + Tutorial to denoise diffusion-weighted MR images using DIPY for the following methods: + - Denoising using Local PCA + - Denoising using Marcenko-Pastur PCA + - Denoising using NLMEANS + +3. Registration + https://github.com/dipy/dipy/pull/2277 + Tutorial to perform image-based and streamline-based registration using DIPY for the following registration methods: + - Affine Registration + - Symmetric Diffeomorphic Registration + - Streamline-based Registration + +4. Gibbs unringing + https://github.com/dipy/dipy/pull/2273 + Tutorial to remove Gibbs ringing artefacts from MR images using DIPY. + +5. Local fiber orientation reconstruction + https://github.com/dipy/dipy/pull/2275 + Tutorial to perform local fiber orientation reconstruction using DIPY for the following reconstruction methods: + - Constrained Spherical Deconvolution (CSD) + - Mean Apparent Propagator (MAP)-MRI + - Diffusion Tensor Imaging (DTI) + - Diffusion Kurtosis Imaging (DKI) + - Constant Solid Angle (CSA) + - Intravoxel Incoherent Motion (IVIM) + +6. Tracking + https://github.com/dipy/dipy/pull/2292 + Tutorial to perform fiber tracking using DIPY for the following tracking methods: + - Local Fiber Tracking + - EuDX Tracking + - Deterministic Tracking + - Probabilistic Tracking + - Closest Peaks Tracking + - Particle Filtering Tracking (PFT) + +Outputs of CLI workflows: +^^^^^^^^^^^^^^^^^^^^^^^^^ + +https://github.com/dipy/dipy_data/pulls/areeshatariq +The results that I got from CLI-exposed workflows are added in the dipy_data repository. 
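+
+To give a flavour of these CLI workflows, a typical terminal session looks roughly like the following (the dataset name and options are only examples; each command documents its exact interface in its ``--help`` output):
+
+.. code-block:: bash
+
+    # download one of the example datasets shipped with DIPY
+    dipy_fetch sherbrooke_3shell
+
+    # denoise a diffusion-weighted volume and write the result to a folder
+    dipy_denoise_nlmeans dwi.nii.gz --out_dir denoised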
+ +Improve Consistency: +^^^^^^^^^^^^^^^^^^^^ + +- https://github.com/dipy/dipy/pull/2211 +- https://github.com/dipy/dipy/pull/2218 +- https://github.com/dipy/dipy/pull/2286 +- https://github.com/dipy/dipy/pull/2287 +- https://github.com/dipy/dipy/pull/2294 +- https://github.com/dipy/dipy/pull/2295 + +Small Improvements/ Minor Fixes: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- https://github.com/dipy/dipy/pull/2212 +- https://github.com/dipy/dipy/pull/2220 +- https://github.com/dipy/dipy/pull/2246 +- https://github.com/dipy/dipy/pull/2261 +- https://github.com/dipy/dipy/pull/2282 +- https://github.com/dipy/dipy/pull/2283 +- https://github.com/dipy/dipy/pull/2284 +- https://github.com/dipy/dipy/pull/2296 + +DIPY Homepage: +^^^^^^^^^^^^^^ + +- https://github.com/dipy/dipy_web/pull/88 +- https://github.com/dipy/dipy_web/pull/90 + +I updated a homepage section by working on the icons and re-designing the section containing an overview of the CLI workflows documentation. + +Medium Blogposts: +^^^^^^^^^^^^^^^^^ + +https://medium.com/@areeshatariq02 +I wrote blog posts for some of the CLI-based workflows to engage users and let them know about the developed tutorials. At the time of writing this report, they are being edited together with the mentors, and are aimed to be published as posts on the medium.com website. + +Current State of the Project: +----------------------------- + +Most of the documentation and the PRs that were made during the project have been merged and were included in the 1.3 release of DIPY (https://github.com/dipy/dipy/releases/tag/1.3.0). The last few PRs are also merged and will be included in the 1.4 release of DIPY (due by December, 28th 2020; https://github.com/dipy/dipy/milestone/8). + +Thanks to the documentation generated in the framework of the GSoD 2020, the DIPY CLI workflows have gained visibility, and have now their own section on DIPY's website (https://dipy.org/documentation/1.3.0./interfaces/). The workflows have started to attract the attention of the community, reflected by the increase in related questions posted in DIPY's gitter room (https://gitter.im/dipy/dipy). + +Learnings and Challenges: +------------------------- + +- Magnetic Resonance Imaging was a completely new concept to me. I had not worked with MRI data before. To know the basic concepts, my mentors provided me with some very helpful reading material. +- While developing the tutorials for CLIs, I myself learned a lot about how useful DIPY is for the analysis of data from diffusion-weighted Magnetic Resonance Imaging (dMRI). +- I learned about DIPY's command-line interfaces that provide an interface to DIPY Python-based workflows and are very easy-to-use commands. +- DIPY uses Sphinx to build the project documentation which is a Python-based documentation generator. I got experience of using reStructuredText which is the default plaintext markup language used by Sphinx. +- I got to know more about the open-source world. Making contributions through GitHub made me comfortable with using git to continue contributing to open-source projects, or to work with git-based versioning systems. 
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2021/2021_01_15_serge_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2021/2021_01_15_serge_gsoc_announcement.rst.txt new file mode 100644 index 0000000..67a6598 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2021/2021_01_15_serge_gsoc_announcement.rst.txt @@ -0,0 +1,106 @@ +Google Summer of Code 2021 +========================== + +.. post:: January 15 2021 + :author: Serge Koudoro + :tags: google + :category: gsoc announcement + +Introduction to DIPY +==================== + +DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from computational neuroanatomy and the medical data science field. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 5th time. + +How to become a part of DIPY's Google Summer of Code 2021 +========================================================= + +GSoC is a program that allows students to learn by contributing to an open-source project while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. This year, DIPY is participating in GSoC under the umbrella of the International Neuroinformatics Coordinating Facility (`INCF `_). More information can be found at INCF's `GSoC 2021 page `_. + +Before considering becoming part of the DIPY GSoC, please read about our expectations. + +All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. However, you should be already familiar with data analysis using Python and Numpy before applying. + +Be happy to ask questions directly in our Gitter channel https://gitter.im/nipy/dipy + +Advice +------ + +Potential candidates should take a look at the guidelines on how to `contribute to DIPY `_. Making a small enhancement/bugfix/documentation fix/etc to DIPY already before applying for the GSoC can help you get some idea how things would work during the GSoC. The fix does not need to be related to your proposal. We have and will continue adding some beginner-friendly issues in Github. You can see some of them `here `_. + +Projects +======== + +Population-based MRI Template Creation +-------------------------------------- + +**Description:** Implement a method to create a population-based MRI template. Given an input of several subjects' MRI, create one standard template MRI for the population. The method will utilize the MRI registration framework available in DIPY. + +**Steps:** + +* Understand MRI data +* Implement template creation method +* Write DIPY workflow of the method +* Test it on different data sets + +**Difficulty:** Intermediate + +**Skills required:** Python, Image Registration, Image Processing. 
+
+**Mentors:** `Bramsh Qamar Chandio `_, `Shreyas Fadnavis `_, and `Jong Sung Park `_
+
+Population-specific Tractography Bundle Atlas Creation
+------------------------------------------------------
+
+**Description:** Implement a method to create a population-specific Tractography Bundle Atlas. Given an input of several subjects' segmented white matter tracts, create one standard atlas of bundles for the population. The method will utilize the streamline-based registration framework available in DIPY.
+
+**Steps:**
+
+* Understand Diffusion Tensor Imaging and Tractography data
+* Implement the Bundle Atlas creation method
+* Write a DIPY workflow of the method
+* Test it on different data sets
+
+**Difficulty:** Hard
+
+**Skills required:** Python, Registration.
+
+**Skills preferred:** Experience with Diffusion Tensor Imaging
+
+**Mentors:** `Bramsh Qamar Chandio `_, `Shreyas Fadnavis `_, and `Jong Sung Park `_
+
+Extend DIPY Horizon workflow for Visualization
+----------------------------------------------
+
+**Description:** Extend the ``dipy_horizon`` workflow by adding more options for the visualization of diffusion data. DIPY Horizon is a workflow that makes it possible to visualize diffusion data such as dMRI, tractograms, white matter bundles, and more from the command line. This project requires the student to add support for different types of file formats and visualizations in the horizon workflow.
+
+**Steps:**
+
+* Add support to visualize orientation distribution functions (odfs) generated from diffusion data
+* Create an option in the horizon workflow to project anatomical measures such as fractional anisotropy (FA), mean diffusivity (MD), etc. on the white matter tracts and visualize them
+* Add region-of-interest (ROI) capability for streamline filtering in Horizon.
+* Add Qt functionality to the dipy_horizon workflow
+
+**Difficulty:** Intermediate
+
+**Skills required:** Python, VTK, Qt.
+
+**Mentors:** `Bramsh Qamar Chandio `_, `Shreyas Fadnavis `_, and `Jong Sung Park `_
+
+DIPY-Tract-or-Treat: DIPY DTI Post-Processing Pipeline
+------------------------------------------------------
+
+**Description:** DIPY has several methods for reconstruction, tractography, bundle extraction, and tractometry. The idea of this project is to combine them all into one command-line interface that does reconstruction, tractography, bundle extraction, and bundle analytics. Users will have the option to select among different methods and design their pipeline from the list of available options.
+
+**Steps:**
+
+* Understand DIPY and its workflows thoroughly
+* Create a command-line interface (workflow) to create a pipeline of different existing methods.
+* Test it on data
+
+**Difficulty:** Intermediate
+
+**Skills required:** Python
+
+**Skills preferred:** Experience with Diffusion Tensor Imaging
+
+**Mentors:** `Bramsh Qamar Chandio `_, `Shreyas Fadnavis `_, and `Jong Sung Park `_
\ No newline at end of file
diff --git a/dipy.org/pull/66/_sources/posts/2022/2022_04_05_serge_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2022/2022_04_05_serge_gsoc_announcement.rst.txt
new file mode 100644
index 0000000..4848369
--- /dev/null
+++ b/dipy.org/pull/66/_sources/posts/2022/2022_04_05_serge_gsoc_announcement.rst.txt
@@ -0,0 +1,83 @@
+Google Summer of Code 2022
+==========================
+
+..
post:: April 05 2022
+   :author: Serge Koudoro
+   :tags: google
+   :category: gsoc announcement
+
+Introduction to DIPY
+====================
+
+DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from computational neuroanatomy and the medical data science field. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 6th time.
+
+How to become a part of DIPY's Google Summer of Code 2022
+=========================================================
+
+GSoC is a program that allows students to learn by contributing to an open-source project while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. This year, DIPY is participating in GSoC under the umbrella of the International Neuroinformatics Coordinating Facility (`INCF `_). More information can be found at INCF's `GSoC 2022 page `_.
+
+Before considering becoming part of the DIPY GSoC, please read about our expectations.
+
+All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. However, you should already be familiar with data analysis using Python and Numpy before applying.
+
+Feel free to ask questions directly in our Gitter channel https://gitter.im/nipy/dipy
+
+Advice
+------
+
+Potential candidates should take a look at the guidelines on how to `contribute to DIPY `_. Making a small enhancement/bugfix/documentation fix/etc. to DIPY before applying for the GSoC can help you get some idea of how things would work during the GSoC. The fix does not need to be related to your proposal. We have and will continue adding some beginner-friendly issues on GitHub. You can see some of them `here `_.
+
+Projects
+========
+
+GPU parallelization of DIPY algorithms
+--------------------------------------
+
+**Description:** We have multiple versions of GPU parallelized algorithms. The project will bring these together in a common framework and add new methods as well. For example, we currently have cudipy and GPU streamlines. A unified framework is required. In addition, we will need to parallelize some algorithms such as those used for probabilistic tractography and nonrigid registration.
+
+**Difficulty:** Intermediate
+
+**Time:** Full time
+
+**Skills required:** CUDA, C/C++, Python
+
+**Mentors:** Shreyas Fadnavis, Jongsung Park, Bramsh Qamar Chandio (contact via gitter or incf neurostar)
+
+A pythonic implementation of topup
+----------------------------------
+
+**Description:** A popular request is to bring topup [1] into a simple pythonic implementation that can be easily extended. The student will work on implementing topup (or a similar function) from the ground up using Python, Cython and Numpy.
+ +**Difficulty:** Hard + +**Time:** Full time + +**Skills required:** Strong familiarity with diffusion MRI, Python, Numpy, Cython or C/C++ + +**Mentors:** Jongsung Park, Shreyas Fadnavis, Bramsh Qamar Chandio (contact via gitter or incf neurostar) + +Extend DIPY Horizon workflow for Visualization +---------------------------------------------- + +**Description:** Extend dipy_horizon workflow by adding more options for the visualization and cleaning of tractograms generated from diffusion MRI data. DIPY Horizon is a workflow that enables to visualize diffusion data such as dMRI, tractograms, white matter bundles, and more from the command line. This project requires students to add support for different types of file formats and visualizations in the horizon workflow. Students will work on adding multiple features to help manual cleaning of streamlines such as select and remove streamlines from a bundle, cut some parts of the bundle and so on. + +**Difficulty:** Intermediate + +**Time:** Full time + +**Skills required:** Python, OpenGL, VTK + +**Mentors:** Bramsh Qamar Chandio, Shreyas Fadnavis, and Jong Sung Park (contact via gitter or incf neurostar) + +Add mutual information for non-rigid registration +------------------------------------------------- + +**Description:** Currently mutual information (MI) similarity metric is available only for affine registration and not nonrigid registration. The student will need to implement, compare and test MI for nonrigid registration. The existing implementation provides examples for SSD and Expectation Maximization metrics. The MI method is expected to work slightly better than our existing EM method for multimodal images. This project will focus on multimodal images. If time is permitted the student can also investigate a tensor based metric for registration. + +**Difficulty:** Beginner + +**Time:** Half time + +**Skills required:** Familiarity with registration algorithms, Python, Cython or C/C++. + +**Mentors:** Bramsh Qamar Chandio, Shreyas Fadnavis, Jong Sung Park (contact via gitter or incf neurostar) diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_02_02_serge_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_02_02_serge_gsoc_announcement.rst.txt new file mode 100644 index 0000000..d921588 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_02_02_serge_gsoc_announcement.rst.txt @@ -0,0 +1,171 @@ +Google Summer of Code 2023 +========================== + +.. post:: February 02 2023 + :author: Serge Koudoro + :tags: google + :category: gsoc announcement + +Introduction to DIPY +==================== + +DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from computational neuroanatomy and the medical data science field. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 7th time. 
+
+How to become a part of DIPY's Google Summer of Code 2023
+=========================================================
+
+GSoC is a program that allows students to learn by contributing to an open-source project while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_.
+
+Before considering becoming part of the DIPY GSoC, please read about our expectations.
+
+All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. However, you should already be familiar with data analysis using Python and Numpy before applying.
+
+Feel free to ask questions directly in our:
+
+- Gitter channel https://gitter.im/dipy/dipy
+- Forum https://github.com/dipy/dipy/discussions
+
+Advice
+------
+
+Potential candidates should take a look at the guidelines on how to `contribute to DIPY `_. Making a small enhancement/bugfix/documentation fix/etc. to DIPY before applying for the GSoC can help you get some idea of how things would work during the GSoC. The fix does not need to be related to your proposal. We have and will continue adding some beginner-friendly issues on GitHub. You can see some of them `here (beginner-friendly issues) `_ or all the issues `here (all issues) `_.
+
+Project Ideas (7)
+=================
+
+**Notice 1:** More project ideas might appear. **Stay tuned and check this page regularly!**
+
+**Notice 2:** We want to provide the best mentoring to our students, so **only 2 or 3 of these projects will be selected.** Not more!
+
+If you have any questions or if you want to contact a mentor:
+
+- `open a new discussion `_ with GSOC as a category.
+
+Project 1. Add mutual information for non-rigid registration
+------------------------------------------------------------
+
+**Difficulty:** Beginner / Intermediate
+
+**Mentors:** Serge Koudoro, Jong Sung Park (contact via github discussion)
+
+**Description:**
+
+Currently, the mutual information (MI) similarity metric is available only for affine registration and not for nonrigid registration. The student will need to implement, compare and test MI for nonrigid registration. The existing implementation provides examples for SSD and Expectation Maximization metrics. The MI method is expected to work slightly better than our existing EM method for multimodal images. This project will focus on multimodal images. If time permits, the student can also investigate a tensor-based metric for registration.
+
+**Time:** Part-time (175 hours) or Full-Time (350 hours)
+
+**Skills required:** Familiarity with registration algorithms, Python, Cython or C/C++.
+
+Project 2. Generalized along tract analysis of fiber orientation dispersion
+---------------------------------------------------------------------------
+
+**Difficulty:** Intermediate
+
+**Mentors:** Rafael Neto Henriques, Julio Villalón (contact via github discussion)
+
+**Description:**
+
+Modeling the fiber orientation dispersion (OD) in the brain's white matter (WM) has been one of the long-standing goals of diffusion MRI (dMRI). The most common way to estimate OD is to apply biophysical models to diffusion MRI data acquired with multiple b-values (multi-shell acquisitions). Nevertheless, a vast amount of clinical dMRI data currently available worldwide is acquired using a single b-value (single-shell acquisitions).
In this project, the student will need to implement OD estimation algorithms compatible with both single-shell and multi-shell acquisitions. These implementations should be integrated with the current reconstruction procedures in the Diffusion in Python (DIPY) library, and they should be compatible with DIPY's unique along-tract analysis. + +**Time:** Full-time (350 hours) + +**Skills required:** Familiarity with Python, experience with diffusion MRI would be a plus. + +Project 3. Correlation Tensor Magnetic Resonance Imaging +-------------------------------------------------------- + +**Difficulty:** Intermediate + +**Mentors:** Rafael Neto Henriques, Shreyas Fadnavis (contact via github discussion) + +**Description:** + +Typical diffusion MRI techniques use different phenomenological and mechanistic models to infer microscopic tissue properties from conventional diffusion MRI acquisitions. Recent studies show, however, that advanced diffusion encoding sequences can provide unique information not accessible from more conventional acquisition approaches [1]. In this project, the student will implement a recently proposed diffusion MRI technique for advanced diffusion MRI acquisitions, termed Correlation Tensor Magnetic Resonance Imaging [2, 3]. This technique allows the estimation of specific sources of tissue non-Gaussian diffusion free from tissue model assumptions. + +**Time:** Full-time (350 hours) + +**Skills required:** Familiarity with Python, experience with diffusion MRI would be a plus. + +**Related References and Links:** + +[1] Henriques, R.N., Palombo, M., Jespersen, S.N., Shemesh, N., Lundell, H., Ianuş, A., 2021. Double diffusion encoding and applications for biomedical imaging. J. Neurosci. Methods, 108989 doi: 10.1016/j.jneumeth.2020.108989 + +[2] Henriques, R.N., Jespersen, S.N., Shemesh, N., 2020. Correlation tensor magnetic resonance imaging. Neuroimage 211. doi: 10.1016/j.neuroimage.2020.116605 + +[3] Henriques, R.N., Jespersen, S.N., Shemesh, N., 2021. Evidence for microscopic kurtosis in neural tissue revealed by correlation tensor MRI. Magn. Reson. Med. 1–20. doi: 10.1002/mrm.28938 + +Project 4. Creating Synthetic MRI data +-------------------------------------- + +**Difficulty:** Hard + +**Mentors:** Jong Sung Park, Serge Koudoro (contact via github discussion) + +**Description:** + +Diffusion models have become a state-of-the-art technique in the image generation area. While a lot of work has been done in the Computer Vision field, there has been limited work on conditional image generation of MRI data. Since it is relatively hard to acquire conditioned brain image data, many research efforts can benefit from synthetic datasets. The student will work on creating a conditioned diffusion model to generate brain MRI. The conditions can include, but are not limited to, different modalities, existence/location of tumors/lesions, pediatric/adult, etc. + +**Time:** Full-time (350 hours) + +**Skills Required:** Python, Tensorflow or Pytorch (Tensorflow preferred), some familiarity with MRI. + +Project 5. An optimal pythonic implementation of Susceptibility Distortion Correction using AP PA data +------------------------------------------------------------------------------------------------------ + +**Difficulty:** Hard + +**Mentors:** Sreekar Chigurupati, Jong Sung Park (contact via github discussion) + +**Description:** + +A popular request is to bring topup [4] into a simple Pythonic implementation that can be easily extended. We will provide an unoptimized version for reference.
The student will work on optimizing topup (or a similar function) using Python, Cython and Numpy. + +**Time:** Full-time (350 hours) + +**Skills Required:** Strong familiarity with diffusion MRI, Python, Numpy, Cython or C/C++ + +**Related References and Links:** + +[4] J.L.R. Andersson, S. Skare, J. Ashburner. How to correct susceptibility distortions in spin-echo echo-planar images: application to diffusion tensor imaging. NeuroImage, 20(2):870-888, 2003. + +Project 6. DIPY algorithms Optimizations +---------------------------------------- + +**Difficulty:** Intermediate + +**Mentors:** Serge Koudoro, Jongsung Park (contact via github discussion) + +**Description:** + +Our algorithms' performance can easily be improved via some algorithmic tricks and auto-vectorization. To realize this, the project will make sure that our current code is "auto-vectorization" friendly for any C compiler. An extra step would be to parallelize via threads and CPUs. In addition, an extra GPU component can be added for a full-time project if the student feels comfortable and confident with this technology. For example, we currently have cudipy and GPU streamlines. A unified framework is required. In addition, we will need to parallelize some algorithms such as those used for probabilistic tractography and nonrigid registration. + +**Project Steps**: + + - Step 1: Look at the pitfalls of our registration and tractography frameworks. + - Step 2: Unroll many loops and simplify several Cython functions. + - Step 3: Benchmark performance improvements. + - Step 4: Research new algorithmic tricks and implement them. Also, implement multithreading/multiprocessing via OpenMP or MPI. + - Step 5 (optional): GPU study and improvement. + +**Time:** Full-time (350 hours) or Part-time (175 hours) + +**Skills Required:** Python, cython, math, algorithms, optimization, CUDA, openmp, MPI, SIMD, SIMT + +Project 7. Accelerated MRI viewer – Horizon +------------------------------------------- + +**Difficulty:** Intermediate/Advanced + +**Mentors:** Sreekar Chigurupati, Jong Sung Park (contact via github discussion) + +**Description:** + +DIPY comes with a modern viewer for dMRI-related data - Horizon [6]. It can be used both via Python and the CLI. The purpose of this project is to load multiple MR images as textures to use them for comparison in slicer viewers. The candidate is expected to use the FURY shader API to create a layer-by-layer visibility option to switch between images, as well as add functionality that changes the relative contrast. In addition, we will create functionality to reset the 3D view to coronal, sagittal, and horizontal views. + +**Time:** Part-time (175 hours) or Full-Time (350 hours) + +**Skills Required:** MRI, Python, DIPY Workflows, FURY, VTK, GLSL + +**Related References and Links:** + +[6] Garyfallidis E., M-A. Cote, B.Q. Chandio, S. Fadnavis, J. Guaje, R. Aggarwal, E. St-Onge, K.S. Juneja, S. Koudoro, D. Reagan, DIPY Horizon: fast, modular, unified and adaptive visualization, Proceedings of: International Society of Magnetic Resonance in Medicine (ISMRM), Montreal, Canada, 2019. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_05_19_vara_week0.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_05_19_vara_week0.rst.txt new file mode 100644 index 0000000..459ec54 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_05_19_vara_week0.rst.txt @@ -0,0 +1,35 @@ +Journey of GSOC application & acceptance : Week 0 +================================================= + +..
post:: May 19 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + +While applying for the GSOC 2023 DIPY sub-project titled “Creating Synthetic MRI”, I knew +this would be the right one for me for two reasons. Keep reading to know more! + +As nervous and not-so-optimistic as I am about applying for academic competitions, I pushed +myself to apply for GSOC out of a necessity for summer job more than anything. This got me out +of my comfort zone and I ventured into open source development. During the time of application +I was a Master’s student from NYU(current status - graduated) with focus on Deep Learning +Applications in Healthcare. I was so involved in research in Computer Vision during school, I +decided to pursue career in the same field going forward. Fortunately, I came across a college +senior’s post on LinkedIn regarding getting accepted as a mentor for GSOC 2023 during that time. +This prompted me to look further into GSOC and its list of projects for this year. I have only +heard of GSOC during my undergrad, during which I never could muster courage to pursue something +outside college. But this time around, I decided to put a confident front and take the leap. + +As I searched through the list of available projects, I got iteratively definitive about what I +wanted to work on - looked for python projects first, filtered out machine learning projects next, +narrowed down to a couple of relevant projects. In the process, I came across the list of DIPY +projects. Firstly, I was looking to further my research knowledge in ML by exploring Generative AI. +Secondly, I have worked with MRI datasets in the context of Deep Learning previously, so +‘Creating Synthetic MRI’ project seemed the right fit. These reasons got me hooked to DIPY +sub-organization. I thoroughly enjoyed exploring DIPY applications and began the process for +the application preparation soon. With the wonderful help from the mentors, I successfully submitted +an application, later got an interview call and voila, I got in! + +I am very happy about participating in GSOC this year. What started out as a necessity has now become +a passion project. I hope to enjoy the journey ahead, looking forward to learning and implementing few +things along the way! diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_05_29_Shilpi_Week_0_1.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_05_29_Shilpi_Week_0_1.rst.txt new file mode 100644 index 0000000..dd8a3d6 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_05_29_Shilpi_Week_0_1.rst.txt @@ -0,0 +1,82 @@ +Community Bonding and Week 1 Insights +===================================== + +.. post:: May 29 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +About Myself +~~~~~~~~~~~~ + +Hey there! I'm Shilpi, a Computer Science and Engineering undergrad at Dayananda Sagar College of Engineering, Bangalore. I'm on track to grab my degree in 2024. +My relationship with Python started just before I started college - got my hands dirty with this awesome Python Specialization course on Coursera. +When it comes to what makes me tick, it's all things tech. I mean, new technology always excites me. Ubuntu, with its fancy terminal and all, used to intimidate me at first, but now, I get a thrill out of using it to do even the simplest things. +Up until 2nd year I used to do competitive programming and a bit of ML. But from 3rd year I've been into ML very seriously, doing several courses on ML as well solving ML problems on Kaggle. 
ML is very fun and I've done a few project on ML as well. +Coding? Absolutely love it. It's like, this is what I was meant to do, y'know? I got introduced to git and GitHub in my first year - was super curious about how the whole version control thing worked. And then, I stumbled upon the world of open source in my second year and made my first contribution to Tardis: (``_) +Initially, I intended on doing GSoC during my second year but ended up stepping back for reasons. This time, though, I was fired up to send in a proposal to at least one organization in GSoC. And, well, here we are! + +Intro to Open-Source and GSoC +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +So, I started off finding out about GSoC - how many hours do selected folks put in, the kind of projects people usually tackle, and all that stuff. To get a handle on what they want in a proposal, I turned to some successful ones from previous years. Really gave me an idea of the kind of stuff they expect you to bring to the table. +Trying to find the organization that'd go with my skill set, I stumbled upon Python Software Foundation, and I was like, "This is it!". And under PSF, there was DIPY. +Diving into DIPY's docs was a breeze as they've got it so well put together that I managed to get my head around a completely new topic, "Diffusion MRI", just by going through their introductory docs and a bit of gpt. +While exploring DIPY, I noticed this issue that needed a new feature. It took a good bit of reading to really understand what they were looking for and how to actually build that feature. And then, I submitted my first PR (`check it out here `__)! Getting it merged wasn't exactly easy - there was a lot of room for improvement up in my code, but honestly, I feel like it's all part of the learning curve. +I was a bit of a latecomer to GSoC, so I didn't have much time to make up a ton of PRs. Plus, by the time I'd submitted my first PR, the proposal submission period had already begun. So, I focused all my energy on increasing my knowledge on the topic and polishing my proposal. Plus, I'd wanted to get my proposal reviewed at least once before I submitted it. + +Code contributions: + +1. [https://github.com/dipy/dipy/pull/2749] + +The Day +~~~~~~~ + +May 4th: I woke up feeling like a nervous wreck. That interview with my organization? Let's just say it didn't go very well. Yet, I couldn't help but hope for the best. The results were supposed to drop at 11:45pm, a moment I wasn't exactly looking forward to. +I tried logging into Google to check, but couldn't. Too many people doing the same thing. I threw my hands up, gave up on the login battle, and got back to work, hoping to distract myself. +Fast forward to 1:30am - I figured by now, the log-in rush should have calmed down. I gave it another shot and... I got in! I clicked over to the dashboard, and there it was. My project. Right there, listed under the Projects section. I had heard that if you get selected, your proposal shows up there. +To confirm that it was actually happening, I picked my phone to check if I'd gotten any official email yet. And yes!! I'd gotten it at 12:49 am. I just hadn't checked. +I whooped, woke up my roomies, rushed to call my parents. +Honestly, words can't even begin to capture how I felt at that moment. +Pure, undiluted joy, that's what it was. My parents, surprisingly actually picked up my call. But the minute I told them I'd made the cut, they congratulated me. It was heck of a day, (^^). 
+ +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +As this was my first week I majorly spent a lot of time knowing about the codebase of the organization. I also went through a couple of research papers of projects which have already been implemented to get information related to my branch. +I'm currently in the middle of reading through the research paper, which is directly related to my project: `here `__ +I also went through some of the videos related to information on cti, a couple of them are: `this `__ and also, `this `__ +I also submitted `this `__ PR. In this PR members of my organization are supposed to submit all the +blogs. +But mostly I spent a lot of time in implementing the already existing MultiTensor Simulation on my local system , and also completing the assignment which my mentor gave me. +In this assignment, I was given a specific number of directions, 'n' and some steps on how to produce bvals and bvecs. I had to create ``gtab1`` and ``gtab2``. and then taking ``gtab1`` & ``gtab2`` as input, I was supposed to create a function which would give output btensor i.e btens. +The purpose of this assignment was to strengthen my knowledge on concepts I've already read and also to give me some coding experience, as this is critical in order for me to be able to implement the rest of my project. + +What is coming up next Week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +These simulations were basically the first task of the proposal. +So after the btensor I intend on producing the synthetic signals using the qti model (hint on how +it is done in qti tests). +make a figure similar to figure 1 of the 2021 CTI paper: +`here `__ + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I got stuck while creating `this `__ PR. I had to rebase a bunch of commits, and this was a new concept to me, so it took me a while to figure it out. Due to rebasing, I ended up creating a bunch of other commits, which made the commit history of this PR a mess. So, I had to learn about the concept of squashing the commits. + +I also got stuck a lot while trying to find out the perpendicular directions to the vectors used in ``gtab1``. I was supposed to implement the following formula: + +.. image:: /_static/images/gsoc/2023/shilpi/formula_.png + :width: 400 + :alt: formula cti gtab + +I had to spend a lot of time figuring out how to combine 3 vectors of shape (81, 3) to get V. And also working on the function which would give me the perpendicular vector to the vector in ``gtab1``. + +I got a bunch of ``ValueErrors`` saying: could not broadcast input array from shape (3,3,1) into shape (3,3) and some ``IndexError`` saying: shape mismatch: indexing arrays could not be broadcast together with shapes (81,) (3,1) (3,). + +I also had to experiment on how to concatenate different vectors to get the vector of the right shape, since there are a bunch of possible options while stacking, such as vstack, hstack, stack, etc. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_05_29_vara_week1.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_05_29_vara_week1.rst.txt new file mode 100644 index 0000000..61ded08 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_05_29_vara_week1.rst.txt @@ -0,0 +1,26 @@ +Community bonding and Project kickstart : Week 1 +================================================ + +.. 
post:: May 29 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +The Community Bonding period ended last week, and my first blog is based on the work carried out during that week. My meeting with the GSoC mentors at the start of the week helped me chalk out an agenda for the week. As the first step, I familiarized myself with Tensorflow operations, functions and distribution strategies. My previous experience with PyTorch as well as `website tutorials `_ on basic Deep Learning models helped me quickly learn Tensorflow. As the next step, I read the VQ-VAE paper and studied the tensorflow open source implementation. VQ-VAE addresses the 'posterior collapse' seen in traditional VAEs and overcomes it by discretizing the latent space. This in turn also improves the generative capability by producing less blurry images than before. Familiarizing myself with VQ-VAE early on helps in understanding the latents used in Diffusion models in later steps. I also explored a potential dataset - `IXI (T1 images) `_ - and performed some exploratory data analysis, such as age & sex distribution. The images contain the entire skull, so they may require brain extraction & registration. It may be more useful to use existing preprocessed datasets & align them to a template. For next week, I'll be conducting a further literature survey on Diffusion models. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_05_Shilpi_week2.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_05_Shilpi_week2.rst.txt new file mode 100644 index 0000000..c8a59fb --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_05_Shilpi_week2.rst.txt @@ -0,0 +1,24 @@ +Signal Creation & Paper Research: Week2 Discoveries +=================================================== + +.. post:: June 05 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ +I worked through this research paper and found some facts relevant to the tasks at hand, such as the different sources of kurtosis. One other important fact I found out was that DDE comprises 2 diffusion encoding modules characterized by different q-vectors (q1 and q2) and diffusion times. This fact is important because the CTI approach is based on DDE's cumulant expansion, and the signal is expressed in terms of 5 unique second and fourth-order tensors. I also found out how the synthetic signals could be created using 2 different scenarios: a mix of Gaussian components, and a mix of Gaussian and/or restricted compartments. +The major part of my time this week was spent in creating synthetic signals, and therefore in creating simulations (a short illustrative sketch of this kind of simulation is included at the end of this post). + + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +I intend on finishing the simulations with appropriate documentation and theory lines. If time permits, I'll resume working on the cti.py file and its tests section. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +I didn't get stuck; however, it did take me a while to go through all the code that I could possibly need in my simulations, and also to understand the theory behind those codes.
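For readers who want a concrete picture, here is a minimal sketch of the "mix of Gaussian components" scenario using DIPY's ``multi_tensor`` simulator. The directions, b-values and compartment parameters below are made up purely for illustration and are not the exact ones used in this project:

.. code-block:: python

    import numpy as np
    from dipy.core.gradients import gradient_table
    from dipy.sims.voxel import multi_tensor

    # Illustrative acquisition: one b=0 plus 64 random directions at two b-values.
    rng = np.random.default_rng(0)
    dirs = rng.normal(size=(64, 3))
    dirs /= np.linalg.norm(dirs, axis=1, keepdims=True)
    bvals = np.concatenate([[0], np.full(64, 1000), np.full(64, 2000)])
    bvecs = np.concatenate([np.zeros((1, 3)), dirs, dirs])
    gtab = gradient_table(bvals, bvecs)

    # Two Gaussian compartments: an anisotropic "fiber" and an isotropic one.
    mevals = np.array([[1.7e-3, 0.3e-3, 0.3e-3],
                       [3.0e-3, 3.0e-3, 3.0e-3]])
    signal, sticks = multi_tensor(gtab, mevals, S0=100,
                                  angles=[(0, 0), (90, 0)],
                                  fractions=[70, 30], snr=None)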
diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_05_vara_week2.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_05_vara_week2.rst.txt new file mode 100644 index 0000000..2bd9fa3 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_05_vara_week2.rst.txt @@ -0,0 +1,33 @@ +Deep Dive into VQ-VAE : Week 2 +============================== + +.. post:: June 05, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ +This week I took a deep dive into VQ-VAE code. Here's a little bit about VQ-VAE - + +VQ-VAE is discretized VAE in latent space that helps in achieving high quality outputs. It varies from VAE by two points - use of discrete latent space, performing separate Prior training. VAE also showed impressive generative capabilities across data modalities - images, video, audio. + +By using discrete latent space, VQ-VAE bypasses the 'posterior collapse' mode seen in traditional VAE. Posterior collapse is when latent space is not utilized properly and collapses to similar vectors independent of input, thereby resulting in not many variations when generating outputs. + +Encoder, Decoder weights are trained along with L2 updates of embedding vectors. A categorical distribution is assumed of these latent embeddings and to truly capture the distribution of these vectors, these latents are further trained using PixelCNN model. + +In the original paper, PixelCNN has shown to capture the distribution of data while also delivering rich detailing in generated output images. In the image space, PixelCNN decoder reconstructs a given input image with varying visual aspects such as colors, angles, lightning etc. This is achieved through autoregressive training with the help of masked convolutions. Auto regressive training coupled with categorical distribution sampling at the end of the pipeline facilitates PixelCNN to be an effective generative model. + +A point to be noted here is that the prior of VQ-VAE is trained in latent space rather than image space through PixelCNN. So, it doesn't replace decoder as discussed in the original paper, rather trained independently to reconstruct the latent space. So, the first question that comes to my mind - How does latent reconstruction help in image generation? Is prior training required at all? What happens if not done? + +My findings on MNIST data shows that trained prior works well only with a right sampling layer(tfp.layers.DistrubutionalLambda), that helps with uncertainty estimation. Therefore, PixelCNN autoregressive capabilities are as important as defining a distribution layer on top of them. Apart from this, I've also been researching and collating different MRI datasets to work on in the future. + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +My work for next week includes checking insights on CIFAR dataset, brushing up on Diffusion Models. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +Working with VQ-VAE code required digging in a little bit before drawing conclusions on results obtained. I reached out to the author of the Keras implementation blog to verify a couple of things. And conducted couple more experiments than estimated and presented the same work at the weekly meeting. 
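To make the discretization step above a bit more concrete, here is a condensed TensorFlow sketch of a vector-quantization layer, roughly along the lines of the Keras VQ-VAE example mentioned in this post. The hyper-parameters are arbitrary and this is not the exact project code:

.. code-block:: python

    import tensorflow as tf

    class VectorQuantizer(tf.keras.layers.Layer):
        """Nearest-codebook lookup with a straight-through gradient."""

        def __init__(self, num_embeddings=128, embedding_dim=16, beta=0.25, **kwargs):
            super().__init__(**kwargs)
            self.beta = beta
            self.embedding_dim = embedding_dim
            self.embeddings = self.add_weight(
                shape=(embedding_dim, num_embeddings),
                initializer="random_uniform", trainable=True, name="embeddings")

        def call(self, x):
            flat = tf.reshape(x, [-1, self.embedding_dim])
            # Squared distances between encoder outputs and every codebook vector.
            d = (tf.reduce_sum(flat ** 2, axis=1, keepdims=True)
                 - 2.0 * tf.matmul(flat, self.embeddings)
                 + tf.reduce_sum(self.embeddings ** 2, axis=0, keepdims=True))
            codes = tf.argmin(d, axis=1)
            quantized = tf.reshape(
                tf.transpose(tf.gather(self.embeddings, codes, axis=1)), tf.shape(x))
            # Codebook loss + commitment loss (the "L2 updates" of the embeddings).
            self.add_loss(
                tf.reduce_mean((tf.stop_gradient(x) - quantized) ** 2)
                + self.beta * tf.reduce_mean((x - tf.stop_gradient(quantized)) ** 2))
            # Straight-through estimator: gradients skip the discrete lookup.
            return x + tf.stop_gradient(quantized - x)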
+ diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_12_vara_week3.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_12_vara_week3.rst.txt new file mode 100644 index 0000000..9baf378 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_12_vara_week3.rst.txt @@ -0,0 +1,35 @@ +VQ-VAE results and study on Diffusion models : Week 3 +===================================================== + +.. post:: June 12, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +I continued my experiments with VQ-VAE on MNIST data to see the efficacy of the Prior training in the generated outputs. The output of the encoder for every input image delivers a categorical index of a latent vector for every pixel in the output. As discussed in the `previous blog post `_, the Prior has been trained separately using PixelCNN (without any conditioning) in the latent space. + +If PixelCNN is a bunch of convolutions, then what makes it a generative model? This is an important question to ask and the answer to it is the sampling layer used on pixelCNN outputs during inference. + +The official code in Keras uses a ``tfp.layers.DistributionLambda(tfp.distributions.Categorical)`` layer as its sampling layer. Without this sampling layer PixelCNN outputs are deterministic and collapse to single output. Also similarly, sampling layer alone, i.e., without any PixelCNN trained Prior, on the predetermined outputs of the encoder is deterministic. This is due to the fact that latent distances are correctly estimated by the pre-trained encoder and during inference categorical sampling layer would always sample the least distance latent, i.e., the one closest to the input. + +Therefore, the autoregressive nature of PixelCNN combined with a sampling layer for every pixel delivers an effective generative model. The outputs for all my experiments are shown in the image below - + +.. image:: /_static/images/gsoc/2023/vara/vq-vae-results.png + :width: 800 + +Based on qualitative analysis, PixelCNN outputs may require some extra work. This leads me to the next step in my research - to explore Diffusion models. The first breakthrough paper on Diffusion models is by DDPM - Denoising Diffusion Probabilistic models. Inspired by previous work on nonequilibrium thermodynamics, they show that training diffusion models while maximizing the posterior likelihood in an image generation task is mathematically equivalent to denoising score matching. + +In simple terms, there are two processes in diffusion modelling - forward & reverse. Forward process iteratively produces noisy images using noise schedulers. This can be reduced to one step noisy image through reparametrization technique. In the reverse process, a U-Net is trained to estimate the noise in the final noisy image. + +During inference/sampling, noise is iteratively estimated and removed from a random noisy image to generate a new unseen image. The L2 loss used to estimate the noise during training is mathematically equivalent to maximizing the posterior likelihood i.e., maximizing the distribution of final denoised image. You can find more details in `this `_ paper. + +`Stable Diffusion `_ paper moves the needle by making the diffusion model more accessible, scalable and trainable using a single Nvidia A100 GPU. Earlier diffusion models were difficult to train, requiring 100s of training days, instability issues and restricted to image modality. 
Stable Diffusion achieved training stability with conditioning on multimodal data by working in latent space. A pre-trained image encoder such as VQ-VAE is used to downsample and extract imperceptible details of an input image. These latents are used to train Diffusion model discussed above. Doing so separates the notion of perceptual compression and generative nature of the whole network. Later the denoised latents can be passed through a VQ-VAE trained decoder to reconstruct images in pixel space. This results in a less complex model, faster training and high quality generative samples. + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Setting up of Big Red 200 HPC account. Training Diffusion model using MNIST latent from VQ-VAE in tensorflow without any conditioning. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_13_Shipi_week3.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_13_Shipi_week3.rst.txt new file mode 100644 index 0000000..687d15a --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_13_Shipi_week3.rst.txt @@ -0,0 +1,29 @@ +CTI Simulation and QTI tutorial : Week 3 +======================================== + +.. post:: June 13 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + +What I did this week: +_____________________ + +This week I worked on finishing the simulations with the appropriate documentation. I also worked on creating a general tutorial for CTI/ QTI as one doesn't already exist for QTI. +The idea behind this general tutorial was that there isn't any tutorial for advanced diffusion encoding. The closest documentation QTI has is `here `_. However, there are several youtube videos. So, in this tutorial we started with simulating qti, and then we make things a little more complex by adding information on CTI as QTI can only handle a single Gradient Table whereas CTI can handle multiple Gradient Tables. +This week I also started by initializing ``cti_tests.py`` file by adding relevant simulations to it. + + +What Is coming up next week: +____________________________ + +I intend on finishing the simulations with appropriate documentation and theory lines. If time permits, I'll resume working on the ``cti.py`` file and it's tests section. +The work on creating simulations is not entirely complete as it requires conversion of all .py files into rst so that it could be generated by sphinx. I also intend on making the tutorial more complete by maybe adding some more detail to it. +The major thing I intend on working on is the construction of the design matrix for the CTI model. +In the context of diffusion MRI, a design matrix refers to a matrix that encodes the relationship between the data (diffusion-weighted signals) and the model parameters we want to estimate. It is essentially a way to map the model parameters to the observed data. +The design matrix in case of CTI captures more complex tissue microstructure information than either DTI or DKI or QTI. + +Did I get stuck anywhere +________________________ + +No, I didn't get stuck. But implementing btensor value for CTI simulation did require a bit of effort. 
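To make the design-matrix idea above concrete, here is a toy, DTI-level sketch. The real CTI design matrix has many more columns (for the covariance and kurtosis tensors), and the column ordering below is purely illustrative rather than DIPY's convention:

.. code-block:: python

    import numpy as np

    def toy_dti_design_matrix(bvals, bvecs):
        """Each row maps the 6 unique diffusion-tensor elements (plus log S0)
        to one measurement, so that log(S) is approximately X @ params."""
        x, y, z = bvecs.T
        X = np.column_stack([
            -bvals * x * x, -bvals * y * y, -bvals * z * z,
            -2 * bvals * x * y, -2 * bvals * x * z, -2 * bvals * y * z,
            np.ones_like(bvals),
        ])
        return X  # shape: (n_measurements, 7)

    # With log-signals `log_s`, an ordinary least-squares fit is then simply
    #   params = np.linalg.pinv(toy_dti_design_matrix(bvals, bvecs)) @ log_s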
diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_19_Shilpi_week4.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_19_Shilpi_week4.rst.txt new file mode 100644 index 0000000..17677d0 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_19_Shilpi_week4.rst.txt @@ -0,0 +1,29 @@ +Re-Engineering Simulation Codes with the QTI Model and Design Matrix +==================================================================== + +.. post:: Jun 19 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +I had to change the ``cti_test.py`` file as the signals generated were not exactly correct. I was advised to follow the multiple gaussian signal generation method. While doing this I had to look closely at several already implemented methods and go in depth to understand how those functions were achieving the desired output. +The multiple gaussian signal generation method is preferred because the CTI signal generation closely resembles the multiple gaussian signals. We're using the multiple gaussian signals so that we can have a priori of what to expect from the outcome, if we fit our model to this signal. +I also managed to implement the design matrix for the CTI tensor and managed to save it up in the ``utils.py`` file. The design matrix is a crucial component of the CTI tensor as it represents the relationships between the different variables in our model. By accurately modeling these relationships, we can generate more realistic simulations and gain a deeper understanding of the CTI tensor. +The link of my work: `Here __` + + + +What is coming up next Week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This week I'll work on fitting CTI on multiple Gaussian simulations and see if it produces the expected output. And therefore, work on improving it. This may require implementing a bunch of methods for the Fit class. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +No, I didn't get stuck anywhere. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_19_vara_week4.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_19_vara_week4.rst.txt new file mode 100644 index 0000000..fae7485 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_19_vara_week4.rst.txt @@ -0,0 +1,25 @@ +Diffusion research continues: Week 4 +==================================== + + +.. post:: June 19, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +As discussed last week, I completed researching on StableDiffusion(SD). Currently we're looking for unconditional image reconstruction/denoising/generation using SD. I completed putting together keras implementation of unconditional SD. Since I couldn't find an official implementation of unconditional SD code, I collated the DDPM diffusion model codebase, VQ-VAE codebase separately. + +DDPM code uses Attention based U-Net for noise prediction. The basic code blocks of the U-Net are Downsampling, Middle and Upsampling blocks, where each constitute ResidualBlock & AttentionBlock. ResidualBlock is additionally conditioned on the diffusion timestep, DDPM implements this conditioning by adding diffusion timestep to the input image, whereas DDIM performs a concatenation. + +Downsampling & Upsampling in the U-Net are performed 4 times with decreasing & increasing widths respectively. Each downsampling layer consists of two ResidualBlocks, an optional AttentionBlock and a convolutional downsampling(stride=2) layer. 
At each upsampling layer, there's a concatenation from the respective downsampling layer, three ResidualBlocks, an optional AttentionBlock, ``keras.layers.Upsampling2D`` and a Conv2D layers. The Middle block consists of two ResidualBlocks with an AttentionBlock in between, resulting in no change in the output size. The final output of the Upsampling block is followed by a GroupNormalization layer, Swish Activation layer and Conv2D layer to provide an output with desired dimensions. + +Due to personal reasons, I took a couple of days off this week and will be continuing the rest of the work next week. + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +I will be running experiments on CIFAR10 on SD, NFBS on 3D VQ-VAE. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_06_26_vara_week5.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_06_26_vara_week5.rst.txt new file mode 100644 index 0000000..79c5e0d --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_06_26_vara_week5.rst.txt @@ -0,0 +1,52 @@ +Carbonate Account Setup, Experiment, Debug and Repeat: Week 5 +============================================================= + + +.. post:: June 26, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + + +I finally got my hands on IU's HPC - Carbonate & Big Red 200. I quickly set up a virtual remote connection to Carbonate's Slate on VS Code with Jong's help. Later, I started looking up on Interactive jobs on Carbonate to have GPUs on the go for coding and testing. I spent a ton of time reading up on Carbonate's Interactive SLURM jobs information. Using X11 forwarding, I was able to spin up an interactive job inside the login node using command prompt. It popped up a Firefox browser window from the login node ending up slow and not very user friendly. Same goes for the Big Red 200 as well. Eventually my efforts were in vain and I resorted to installing a jupyter notebook server on my home directory. Although I can't request a GPU with this notebook, it allows me to debug syntax errors, output visualization, plotting loss values etc. + + +Continuing on my MNIST experiments, I ran into Multi Distribution issues while training the unconditional Diffusion Model(DM). Without getting into too many details I can summarize that having a custom train_step function in tensorflow, without any default loss reduction such as *tf.reduce_mean* or *tf.keras.losses.Reduction.SUM*, requires more work than *model.fit()*. So, my current loss function used for training DM is reduced on the last channel while the rest of the shape of each batch is kept intact. When using distributed training, tensorflow requires the user to take care of gradient accumulation if it's an unreduced loss. So, I tried to learn from Tensorflow tutorials. Alas, all their multi distributed strategy examples were based on functional API models whereas my approach is based on object oriented implementation. This led to design issues. For the sake of time management, I did a little bit of tweaking. While compiling the model under *tf.distribute.MirroredStrategy*, I passed *tf.keras.losses.Reduction.SUM* parameter to the loss function and divided the loss by a pre-decided factor which is *np.prod(out.shape[:-1])* i.e., number of elements in the output shape excluding the last channel which is reduced in the loss function. This tweak worked and also does not have any unexpected impacts on the architecture as well as the training paradigm. 
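A rough sketch of that compile-time pattern is shown below; the tiny stand-in network and the latent shape are placeholders for illustration, not the actual diffusion model used in this project:

.. code-block:: python

    import numpy as np
    import tensorflow as tf

    strategy = tf.distribute.MirroredStrategy()

    latent_shape = (7, 7, 16)                  # placeholder latent grid
    scale = float(np.prod(latent_shape[:-1]))  # elements per sample, excluding channels

    def sum_then_rescale_mse(y_true, y_pred):
        # SUM reduction is the distribution-safe choice; dividing by a fixed,
        # pre-decided factor stands in for the mean over non-channel elements.
        loss = tf.keras.losses.MeanSquaredError(
            reduction=tf.keras.losses.Reduction.SUM)(y_true, y_pred)
        return loss / scale

    with strategy.scope():
        # Tiny stand-in network, purely to show the compile pattern.
        model = tf.keras.Sequential([
            tf.keras.Input(shape=latent_shape),
            tf.keras.layers.Conv2D(16, 3, padding="same"),
        ])
        model.compile(optimizer=tf.keras.optimizers.Adam(2e-4),
                      loss=sum_then_rescale_mse)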
+ + +I followed the architecture described in my previous blog for the DM. I trained this on VQ-VAE latents of MNIST dataset for 200 diffusion steps, 2 Nvidia V100 GPUs, Adam Optimizer with 2e-4 learning rate, 200 batch size per GPU for 100+ epochs. For the generative process, I denoised random samples for 50, 100 and 200 steps on the best performing model(112 epochs). Here are the results I achieved - + + +.. image:: /_static/images/gsoc/2023/vara/DM-MNIST-112epoch.png + :width: 800 + + +We see some resemblance of digit shapes in the generated outputs. On further training for 300 diffusion timesteps for the best performing model( 108 epochs) with least training loss, the visuals have improved drastically - + + +.. image:: /_static/images/gsoc/2023/vara/DM-MNIST-DDIM300-108epoch.png + :width: 800 + + +These outputs show the effectiveness of the model architecture, training parameters and the codebase. + + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +Work on T1 weighted MRI datasets on modified 3D conv code. Hyperparameter tuning for the best results. If time permits, work on the FID evaluation metric. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + + +Most of the work conducted this week included setting up the environment, debugging, researching documentation. For the rest of the little time, I ran experiments. Having the code ready, both VQ-VAE and DM, before I got hold of GPUs, helped me save a lot of time. This week's work imparted a great learning experience for me. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week5.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week5.rst.txt new file mode 100644 index 0000000..8ae2c60 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week5.rst.txt @@ -0,0 +1,22 @@ +Design Matrix Implementation and Coding with PEP8: Week 5 +========================================================= + +.. post:: July 06 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + +What I did this Week +~~~~~~~~~~~~~~~~~~~~ + +This week, my work focused on two main areas: improving the design matrix and implementing methods under the Fit class in CTI. +For the design matrix improvement, I noticed that the design matrix I had previously created was not according to PEP8 standards. After some effort, I managed to modify it to comply with the appropriate format. +This week, my time was mostly consumed by implementing methods under the Fit class in CTI. As CTI is an extension of DKI and shares similarities with the QTI model, I had to look into methods already implemented in DKI and QTI. My approach involved going through these two different modules, comparing the methods, and making notes on which ones would need to be implemented in CTI. This was challenging, as CTI's design matrix is significantly different. +Although this implementation is not completely done, I was able to learn a lot. + +What is coming up next Week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This week I intend to further implement the Fit class and also generate tests for the already implemented methods under the Fit class. +And also write tests to make sure that the signals generated in the QTI model are the same as the ones done in ``CTI_pred``. 
+I also intend on changing the order of parameters of covariance tensor as CTI has a lot of similarities with the QTI module, and in order to use QTI methods, we need to make sure that the order of parameters under covariance tensor in QTI is same as order of parameters in CTI. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week6.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week6.rst.txt new file mode 100644 index 0000000..5acc2d8 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_06_shilpi_week6.rst.txt @@ -0,0 +1,31 @@ +Creating signal_predict Method: Testing Signal Generation +========================================================= + +.. post:: July 06 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week, I worked together with my mentor to come up with a new way of arranging the elements of the design matrix. So, first I rearranged all the parameters of the covariance parameters so that they'd match with the ones in QTI. So now, the order is: the diffusion tensor, the covariance tensor, and then the kurtosis tensors. But then we decided that it would be better to put the kurtosis tensors first because then we wouldn't have to re-implement all the kurtosis methods again. So, I changed the order of kurtosis and the covariance tensors. + +Also, in order to maintain the coding standards of the previously implemented models, we decided that the diffusion tensor should be divided into evals and evecs. + +Therefore, because of all these changes I had to re-implement a lot of already implemented functions in CTI which also required changing the description of those functions and not only the code. + +But my major time went towards writing tests for these modified codes. While writing codes, I realized that a lot of the functions needed to be modified a bit. Also, I had to import several new libraries in order for the functions to work. + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The testing part for the implemented method is not yet done correctly, as the signals don't yet match the expected output. So, I intend on re-implementing them by taking into consideration the suggestions provided by my mentor. This would require modifying code of some already implemented functions as well as re-writing the tests, particularly the generation of the eigenvalues and the eigen vectors. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I didn't exactly get stuck, but implementing the tests requires you to make sure that the shape of the tensors you're passing into a function is correct and is as expected. This took me a while to figure out. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_10_vara_week6_and_week7.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_10_vara_week6_and_week7.rst.txt new file mode 100644 index 0000000..9ae7562 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_10_vara_week6_and_week7.rst.txt @@ -0,0 +1,61 @@ +Diffusion Model results on pre-trained VQVAE latents of NFBS MRI Dataset: Week 6 & Week 7 +========================================================================================= + + +.. post:: July 10, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + + +My current code for VQVAE & DM is well tested on MNIST dataset as shown in the previous blog posts. 
I extended the current codebase for MRI dataset by using 3D convolutions instead of 2D ones, which resulted in 600k parameters for VQVAE for a downsampling factor f=3. I used a preprocess function to transform MRI volumes to the desired shape (128,128,128,1) through DIPY's reslice and scipy's affine_transform functions, followed by MinMax normalization. I trained the VQVAE architecture for batch_size=10, Adam optimizer's lr=2e-4, 100 epochs. I followed suit for downsampling factor f=2 as well and got the following training curves- + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-training-curves.png + :width: 800 + + +The reconstructed brain volumes on the test dataset on the best performing model are as shown below. As seen in the first image, there are black artifacts in the captured blurry brain structure. Whereas the second image(f=2) does a better job in producing less blurrier brain structure. Nonetheless we only see the outline of the brain being captured with no micro-structural information inside them. + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-reconst-f3.png + :width: 800 + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-reconst-f2.png + :width: 800 + + +Later, the 3D Diffusion Model was trained for approximately 200 epochs for 200 & 300 diffusion time steps in two different experiments respectively. The training curves and obtained generations are shown respectively. Both the generations are noisy and don't really have a convincing outlook. + + +.. image:: /_static/images/gsoc/2023/vara/dm3d-training-curves.png + :width: 800 + + +.. image:: /_static/images/gsoc/2023/vara/dm3d-reconst-D200-D300.png + :width: 800 + + +Given the achieved noisy generations, I decided to train VQVAE for a higher number of epochs. This may also indicate that the performance of DM is hitched on good latent representations i.e., a trained encoder capable of perfect reconstructions. So I trained f=3 VQVAE for a higher number of epochs as shown below. + + +.. image:: /_static/images/gsoc/2023/vara/vqvae-f3-higher-epochs.png + :width: 800 + + +The reconstructions obtained on best VQVAE seemed to have produced a better volumetric brain structure. Although, a common theme between all reconstructions is that we see a pixelated output for the last few slices with a checkerboard sort of artifacts. Anyhow, I ran a couple more experiments with a more complex VQVAE model that has residual blocks to carry forward information. None of the reconstructions nor the DM generations have made any progress qualitatively. + + +What Is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +One idea can be working to improve VQVAE's effectiveness by playing around with architecture components and hyper-parameter tuning. Alongside I can also work on looking into checkerboard artifacts seen in the reconstructions. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_12_Shilpi_week7.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_12_Shilpi_week7.rst.txt new file mode 100644 index 0000000..c7add33 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_12_Shilpi_week7.rst.txt @@ -0,0 +1,29 @@ +Modifying Test Signal Generation +================================ + +.. post:: July 12 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +One of the tasks I did this week was modify the ``cti_design_matrix`` again, as asked by my mentor to make the code more readable. 
The initial code was following pep8 standard but it wasn't very easy to read, but now it is. +Also, I realized that the main reason my signals weren't matching the ground truth values before at all was because the eigenvalues and eigenvectors of the diffusion tensor distribution were wrong. This was because, before I tried getting D_flat by doing: ``np.squeeze(from_3x3_to_6x1(D))`` which returned a tensor of shape ( 6, ). But in this case, it returned the diffusion tensor elements in the order : Dxx, Dyy, Dzz and so on which isn't the correct format of input expected for the "from_lower_triangular" function. So, initially, we were doing : ``evals, evecs = decompose_tensor(from_lower_triangular(D_flat))`` where the from_lower_triangular function is returning a tensor of shape: (3,3). But then I realized that rather than calculating D_flat, we can simply do: ``evals, evecs = decompose_tensor(D_flat)``. Following this approach gave the correct value of "evals and evecs". So, now we have the correct values of "evals and evecs" which made the signals come closer to the ground truth signals, but we still don't have the signals completely matching the ground truth signals. +Another problem we realized was that while passing "C", covariance tensor parameters, we needed to make sure that we were passing the modified C parameters, that is "ccti". This again helped in bringing the signals to the expected values. +So, after talking things through with my mentor, and analyzing the QTI paper, we came to a few conclusions which could be done to improve the signal values. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We realized that there might be a slight probability that there is a typo somewhere in the actual implementation of the QTI signals. So we decided to contact the original author and code implementer of QTI. +Also, one important thing I intend on doing is check the papers, and see the conversions are being done correctly, that is we are taking into consideration the (root2) factor which is present for some covariance tensor elements in the original paper. This is because, for the isotropic case we observe that the signals are matching perfectly because in the isotropic case all the (root2) parameters of the original covariance elements are zero. +Another thing that I intend on doing is to create a new test method which will have some similarities to the test method in dki_tests. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I didn't get stuck anywhere, but trying to figure out the problem with the current signal generation did take some time and required looking into the research papers. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_24_vara_week_8_9.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_24_vara_week_8_9.rst.txt new file mode 100644 index 0000000..31bebbb --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_24_vara_week_8_9.rst.txt @@ -0,0 +1,74 @@ +VQVAE MONAI models & checkerboard artifacts: Week 8 & Week 9 +============================================================ + +.. post:: July 24, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +We observed in our previous results that the Diffusion Model's performance may depend on better and effective latents from VQVAE. After playing around with convolutional & residual components in the existing architecture that yielded unsatisfactory results, we decided to move to a more proven model on 3D MRIs. 
It is not necessary that a model that worked well on MNIST dataset would also deliver similarly on 3D MRI datasets, owing to the differences in complexity of the data distributions. Changing the convolutions to 3D filters alone clearly did not do the job. + + +MONAI is an open source organization for Machine Learning in Medical Imaging, it has repositories and tutorials for various high performing networks tested on multiple Medical Image datasets. We adopted the deep learning architecture for VQVAE from MONAI's PyTorch implementation that was trained & tested on BRATS(400 data elements). The predominant difference is that the encoder & the decoder of VQVAE use Residual units differently than our existing setup. These Residual units are alternated between downsampling/upsampling convolutions in the encoder/decoder. Additionally, MONAI's VectorQuantizer uses non-trainable embeddings with statistical updates(Laplace Smoothing) on them at every iteration. + + +I implemented MONAI's VQVAE architecture in Tensorflow from scratch, excluding the VectorQuantizer. This architecture has 46.5M trainable parameters. The training objective is to minimize the sum of reconstruction & quantization loss - same training paradigm as our previous experiments. In addition, to address the checkerboard artifacts, I referred to the `Sub-Pixel Convolution paper `_. + +This paper proposes two methods to overcome the deconvolution overlap, a phenomenon that causes checkerboarded outputs in deconvolution/upsampling layers. These two methods are - Sub Pixel Convolution & NN Resize Convolution. + +For an upsampling rate :math:`r`, Sub Pixel Convolution outputs :math:`3r^2` output channels & later reshuffles channel dimension along spatial dimensions (upsamples them by :math:`r` across each) resulting in 3(desired) output channels. + +Whereas NN Resize performs interpolation on the kernel to upsample its size by :math:`r` before carrying out convolution that outputs 3 channels. The former method relies on shuffling & the later method relies on nearest neighbor interpolation to obtain an upsampled output respectively. + +Both methods have shown to perform better qualitatively in dealing with the checkerboards, on random initialization. The authors also go ahead and prove mathematically that with an efficient initialization, both methods prove to be equivalent. They call it the ICNR initialization - Initialization of Convolution with NN Resize. + + +I ran multiple experiments with batch_size=5,10,10(with ICNR). The training loss curves obtained are as follows, all of them trained on 1 GPU for 24hrs. We see that all of them converge except the last one(B=10 with ICNR). + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-monai-training.png + :width: 800 + + +The best training checkpoint has been used to reconstruct test images. Following images depict 2 such reconstructions in 2 rows, where 5 slices from each of these reconstructions have been displayed in columns. + + +The first one is for B=10, the best training checkpoint had training loss=0.0037. Compared to our previous VQVAE model, we see a better performance in capturing the brain outer structure. Moreover, we don't see white blobs or artifacts as inner matter, rather some curvatures contributing to the inner microstructure of a human brain. + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-monai-B10.png + :width: 800 + + +The second one is for B=10 with ICNR kernel initialization, the best training checkpoint had training loss=0.0067. 
Although, the test results do not look complete. I implemented ICNR through DIPY's resize function to achieve NN resize equivalent output on the kernel filters. This initialization didn't work as it was intended to, further proving that the training is yet to be converged. + + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-monai-B10-ICNR.png + :width: 800 + + +The next & last image is for B=5, the best training checkpoint had training loss = 0.0031. By far the best one quantitatively as well as visually. The test loss for the below reconstructions is 0.0013. The superior performance of this batch size can be owed to the Batch Normalization(BN) layers in the architecture that calculate mean & average of the batch to perform normalization over all batch elements using these statistics. Having lesser batch size may contribute to least variation in the output of the layer & helps in achieving converging outputs faster. This explanation stems from the concept of Contrastive Learning, where BN layers are used as the source of implicit negative loss learners. Higher the batch size, more implicit negative samples to move away from. Whereas our objective is to minimize the reconstruction loss, having lesser batch size consequently may help in lesser variation. + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-monai-B5.png + :width: 800 + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As the next step, I can focus on training the LDM(Latent Diffusion Model) from the best performing model from the above experiments. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +In both weeks, I had issues accessing resources & specifically multiple GPUs. + + + + + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_27_Shilpi_week_8_9.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_27_Shilpi_week_8_9.rst.txt new file mode 100644 index 0000000..3560d25 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_27_Shilpi_week_8_9.rst.txt @@ -0,0 +1,31 @@ +Generating Fit Functions : Week 8 & 9 +===================================== + +.. post:: July 27, 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week, I started by figuring out how to run Spyder on Ubuntu. After resolving technical problems, I needed to ensure I could edit code to meet pep8 standards but the automatic formatting of code wasn't working. I made changes in the ``utils.py`` file to increase the design matrix readability and fixed a typo in the ``B[:,3]`` and ``B[:, 4]`` diffusion tensor elements. This is because we realized that the sign needed to be negative to show that it's representing a signal decay. +I implemented mapping all of the covariance parameters from paper to its actual code, creating a need to talk to the original paper's authors as the conversion shown in the paper didn't quite match its implementation. +I also worked on matching the ground truth signal values in case of anisotropic and combined DTDs. This is because the isotropic DTD signals that were being generated matched exactly the QTI signals, as in case of isotropic we've 6 non zero elements, and the rest are 0s. However in anisotropic case we had more non-zero covariance parameters (9 non-zero), similarly as in the case of combined DTD. So we figured out that the non-zero elements are being multiplied to some value which isn't correct and that this needs modifying the ``ccti`` conversion. 
+So, I worked on reading more about Voigt notation, as the QTI parameters are implemented using that notation. +We then looked into the QTI paper again and felt the need to contact its author and code implementer; it turned out that the code was written with the Voigt notation conversion as well as some other factors in mind. At the end of this, we figured out the correct conversion of the ``ccti`` parameters: some factors needed division by root2, while others needed division by 2. We were therefore able to determine the correct factors that each of the covariance parameters needed to be multiplied or divided by. +Hence, the signal values of all the DTDs now match as expected. +The other major ongoing task this week has been the implementation of the Fit class in CTI. This required me to implement some functions that may already have counterparts in DKI/QTI. This is an ongoing task and will require more work. + +Things coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After matching the signals against the ground-truth values, we realized that a DTD with more non-zero covariance parameters might make the ``ccti`` conversion more robust by taking all cases into consideration. So, we created a DTD with mevals, their angles, and the fractions. However, the signals didn't match exactly. Rather than staying stuck on this case, we decided to move forward for the time being; I'll work on making sure that all the ground-truth signals match. +The more important work for this coming week will be to implement all the required functions in the ``ccti`` module, such as the different sources of kurtosis, which haven't been implemented before and are one of the differentiating factors of CTI. Then I will hopefully move on to writing the tests for these functions. + +Did I get stuck this week +~~~~~~~~~~~~~~~~~~~~~ + +Not at all. Some things were kind of vexing, but I didn't get stuck as there was always something else that could be done. + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_07_31_Shilpi_week10.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_07_31_Shilpi_week10.rst.txt new file mode 100644 index 0000000..3027026 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_07_31_Shilpi_week10.rst.txt @@ -0,0 +1,41 @@ +Adding Tests : Week 10 +====================== + +.. post:: July 31, 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Last week, we decided to generate a DTD to make the model more robust. This decision accounted for situations where almost all the parameters were non-zero. However, the signals weren't matching exactly in that situation. This week, I fixed that issue. We can now safely say that all DTDs will match the ground-truth signals, regardless of which parameters are non-zero or what changes we make. We accomplished this by figuring out the correct ordering of the ccti parameters, i.e., the covariance parameters that take the root2 and '2' factors into consideration. + +I also removed the mean, axial, and radial kurtosis functions from our CTI Fit method, since I realized that we already import all the necessary features through the DKI Fit class. I still need to write tests for these methods to ensure that everything is working properly. + +The most important task this week has been the implementation of the ``ls_fit_cti``, ``cls_fit_cti``, and ``split_cti_params`` functions (the general idea behind such a least-squares fit is sketched below).
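As a rough illustration of the core idea behind such a least-squares fit (the real DIPY functions additionally handle weighting, masking, and packing of the fitted parameters):

.. code-block:: python

    # Rough NumPy sketch of a least-squares tensor fit (illustrative only;
    # not the actual ``ls_fit_cti`` implementation).
    import numpy as np

    def ls_fit(design_matrix, signals, S0):
        # Solve design_matrix @ params ~ log(signals / S0) in the least-squares sense.
        y = np.log(signals / S0)
        return np.linalg.pinv(design_matrix) @ y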
These functions are vital in the CTI Fit class. I also wrote tests for this class, which required understanding similar previously implemented functions and the parameters they need. One example is ``data``, which can basically be interpreted as the ``cti_pred_signals``. +The point of writing these tests is to check whether, given the signals, ``gtab1``, and ``gtab2``, we can recover the cti parameters correctly. So, I'm still analyzing all the functions that have been implemented in DKI and the tests that have been written for them. +The key testing function for this purpose is the ``test_fit`` function in CTI. + +I also implemented ``test_split_cti_params`` and ``test_cti_fits`` in the ``test_cti.py`` file. In CTI, we must implement tests for radial, axial, anisotropic, and mean kurtosis, even though we're not implementing those measures ourselves. To write these tests, I referred to the ground-truth values obtained when calling the functions outside the Fit class, and implemented the tests accordingly. +Note that in DKI we only needed multiple b-values, whereas in CTI we also need symmetric ``b1`` and ``b2s`` as well as parallel and perpendicular experiments. In CTI, we consider not only the different components of kurtosis but also its sources. The three sources of kurtosis that we identify in CTI are isotropic kurtosis, anisotropic kurtosis, and microscopic kurtosis. I looked through the formulas in the original CTI paper and implemented functions for these. + +Things to do next week +~~~~~~~~~~~~~~~~~~~~~~ + +Even though I've implemented the function that calculates the sources of kurtosis, it still sits outside the CTI Fit class and needs to be incorporated into it. I also need to write tests for these functions; to do so, I intend to refer to the QTI paper to look for similarities. + +Also, even though I've written tests for the ``ls_fit_cti`` and ``cls_fit_cti`` functions, I still need to run them and make sure everything works as expected. + +I also need to make sure that my model runs on multi-voxel data. A voxel in a 3D image can be thought of as similar to a pixel in a 2D image, in that both represent a discrete element of the image data. In DIPY, a voxel model refers to a representation of a 3D image volume as a collection of individual voxels, each of which represents a small volume element within the image. +After I write tests for the multi-voxel cases, and if my model is working on everything, I'll move on to running it on real data. + +If I get stuck anywhere, I intend to write a tutorial for the CTI implementation in the meantime. + +Did I get stuck +~~~~~~~~~~~~~~~ + +No, I didn't. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_07_vara_week_10_11.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_07_vara_week_10_11.rst.txt new file mode 100644 index 0000000..5bf4822 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_07_vara_week_10_11.rst.txt @@ -0,0 +1,34 @@ +Carbonate issues, GPU availability, Tensorflow errors: Week 10 & Week 11 +======================================================================== + +.. post:: August 7, 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Recently, I've been assigned an RP (Research Project) account on Indiana University Bloomington's HPC cluster, Carbonate.
This account lets me access multiple GPUs for my experiments. + +Once I started configuring my sbatch file accordingly, I ran into issues such as GPU access. My debug print statements revealed that I was accessing only 1 CPU despite configuring the sbatch job for more than 1 GPU. I double-checked my dataloader definition, DistributionStrategy, and train function, and read through IU's blogs as well as other online resources to see if I was missing something. + +Nothing worked. My mentor eventually asked me to raise an IT request on Carbonate, but the IT personnel couldn't help either. This could only mean that TensorFlow was not picking up the assigned GPUs. So, on my mentor's suggestion, I loaded an older version of the deep learning module, 2.9.1 (I had used 2.11.1 earlier). This worked! + +This also meant using a downgraded version of TensorFlow (2.9), which led to errors again - time-consuming yet resolvable. I made some architectural changes to accommodate the older TensorFlow version: I replaced GroupNorm with BatchNorm layers and the tensor_slices-based DataLoader with a DataGenerator. Additionally, I had to change the model structure from a list of layers to a ``tensorflow.keras.Sequential`` set of layers with the input_shape defined in the first layer. Without this last change, I ran into ``None`` object errors. + +Once all my new code was in place, the week ended, hahahah. GPUs were also scarce that same week. I'm glad I got some work done though. + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Run more experiments! + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +All I did was get stuck again & again :P +But all is well now. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_08_Shilpi_week11.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_08_Shilpi_week11.rst.txt new file mode 100644 index 0000000..5602feb --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_08_Shilpi_week11.rst.txt @@ -0,0 +1,30 @@ +Making the Tests Work : Week 11 +=============================== + +.. post:: August 08, 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Previously, the functions for the different sources of kurtosis lived outside the Fit class. On my mentor's suggestion, this week I moved them inside the Fit class. This required changing how certain variables were accessed inside those functions, and determining what arguments needed to be passed to them. + +I added documentation to those functions, including their basic information and the math involved, and wrote tests for them inside the ``test_cti_fits`` function. To implement this, I took help from ``QTI.py``, as our ``K_aniso`` resembles the QTI ``k_shear`` function and ``K_iso`` resembles the QTI ``k_bulk`` function. Taking these points into consideration made writing the tests easier. While implementing the tests, I had to keep the conversion factors in mind: every variable involving the ``from_a_6x6_to_21x1`` matrix conversion required its own conversion function, such as ``k_shear``, ``k_bulk``, etc. + +I also implemented tests for the mean, axial, radial, and apparent kurtosis functions already present in the CTI Fit class. These measures, derived from the diffusion kurtosis tensor, provide different perspectives on the microstructural complexity of the tissue.
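Since the CTI Fit class inherits these measures from DKI, here is a hedged sketch of how they are typically obtained with DIPY's DKI model (``data`` and ``gtab`` are assumed to be already loaded; the CTI interface itself may differ):

.. code-block:: python

    # Hedged sketch: standard kurtosis measures from DIPY's DKI model, which the
    # CTI Fit class builds on. ``data`` and ``gtab`` are assumed to exist already.
    import dipy.reconst.dki as dki

    dkimodel = dki.DiffusionKurtosisModel(gtab)
    dkifit = dkimodel.fit(data)

    MK = dkifit.mk(0, 3)   # mean kurtosis, clipped to the [0, 3] range
    AK = dkifit.ak(0, 3)   # axial kurtosis
    RK = dkifit.rk(0, 3)   # radial kurtosis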
+This week, I was also given real data with which to construct ``bvals`` and ``bvecs`` from files with the ``.bvals`` and ``.bvecs`` extensions. I was able to process these files successfully on my computer and ensured that the ``gtab`` created from them gave the expected signals. + +The main work this week was modifying the tests of the functions as required and trying to make them pass. However, I realized that the 'fit' function inside the model class was not implemented properly, so the test functions could not work as expected. +I also started on the tutorial for CTI. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This week, I'll try to make sure that the test functions work properly. I'll focus on the function ``ls_fit_cti`` to ensure it gives the required output. Once this function works, I'll move on to implementing the 'fit' function, which will later help in testing the Fit class. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I didn't exactly get stuck, as I kept learning new things while encountering problems in testing the function. When I felt that I could not move forward with one task, I immediately started on other tasks, which included adding documentation to functions and getting started with the tutorials. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_15_Shilpi_week12.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_15_Shilpi_week12.rst.txt new file mode 100644 index 0000000..8a7ab36 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_15_Shilpi_week12.rst.txt @@ -0,0 +1,28 @@ +Week12: Making Test Functions Work +================================== + +.. post:: August 15 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Following up on last week, I was trying to make the ``test_cti_fits`` and ``test_split_cti_params`` functions work. This week I figured out the problem with these functions and was able to fix it. The major problem occurred while comparing the parameters. First, I removed all the extra fit methods in the ``common_fit_method`` list, such as NLS, CLS and CWLS, as I realized that we won't immediately need an extra ``multi_tensor_fit`` function in CTI. + +I also realized that our fit method needs to accommodate multiple voxels, so I had to add the "@multi_voxel_fit" decorator on top of that method. A major realization was that the already implemented ``ls_fit_cti`` function was not, in fact, implemented incorrectly. The cti_return (obtained directly from ``ls_fit_cti``) was not matching cti_params because of the eigenvalue decomposition: since the total diffusion tensor in my test was isotropic, decomposing the tensor inside ``ls_fit_cti`` produced different eigenvectors due to floating-point precision. So, I instead compared the more fine-grained variables: the evals from ``cti_params`` against the evals of ``cti_return``, and the same for the covariance tensor elements and kurtosis tensor elements. On doing this, the variables matched perfectly. + +Also, last time I had the tests and functions for axial, radial, mean and apparent kurtosis, which are different measures derived from the diffusion kurtosis tensor that provide different perspectives on the microstructural complexity of the tissue.
Since the fit function itself could not be tested last time, I consequently couldn't run tests for these functions either. Once the tests for the fit function were working, I ran the tests for these functions and found some errors. I fixed them by revisiting the implementation of these functions and by changing the direction assertion to np.allclose, since we also need to account for floating-point precision. + +For the tests of the sources of kurtosis, we had previously decided to take the QTI tests into consideration, as ``k_bulk`` appeared similar to ``k_iso`` and ``k_shear`` seemed similar to ``k_aniso``; upon careful examination, however, we realized this was not actually the case. In QTI, when D is computed from self.params, the top 6 elements are already in Voigt notation, and one can also observe that the ``from_3x3_to_6x1`` conversion multiplies in some factors. The bottom line is that "D" in our case and the diffusion tensor "D" in their case are different. This matters because they use D to calculate ``k_bulk`` (line 1059 in QTI.py) while we use it to calculate ``K_iso``. Therefore I couldn't reuse their tests. The only remaining option for implementing tests for the sources of kurtosis is to go back to the actual paper and implement its ground-truth values. + +This week I also spent time creating a basic draft of the CTI simulation. + +Things to do next week +~~~~~~~~~~~~~~~~~~~~~~ + +This week I intend to make sure that the tests for the sources of kurtosis are implemented properly and work as expected. +Also, running real data through our model currently gives some overflow errors; I intend to work on this as well. +I'm also going to make sure that by the end of this week I have a good, near-finished draft of the CTI simulation. diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_21_vara_week_12_13.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_21_vara_week_12_13.rst.txt new file mode 100644 index 0000000..244a5f8 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_21_vara_week_12_13.rst.txt @@ -0,0 +1,82 @@ +Finalized experiments using both datasets: Week 12 & Week13 +============================================================ + +.. post:: August 21 2023 + :author: Vara Lakshmi Bayanagari + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +MONAI's VQVAE results on the T1-weighted NFBS dataset (125 samples, batch size of 5) were qualitatively and quantitatively superior to all previous results. I continued the same experiments on the T1-weighted CC359 (Calgary-Campinas-359) public dataset, consisting of 359 anatomical MRI volumes of healthy individuals. I preprocessed the data using the existing ``transform_img`` function, which - + + 1. skull-strips the volume using the respective mask + 2. scales the volume to a (128,128,128,1) shape & (1,1,1) voxel size using dipy's ``resize`` & scipy's ``affine_transform`` + 3. applies MinMax normalization to limit the range of intensities to (0,1) + +Using the existing training parameters, I carried out two experiments, one on CC359 alone & another on both datasets combined. Additionally, I made a slight modification to the loss definition by attributing different weights of 0.5 & 1 to background & foreground pixels, compared to the equal weights of the previous experiments (a minimal sketch of this weighting is shown right below).
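For illustration, a weighted reconstruction loss along these lines could be sketched in TensorFlow as follows (the 0.5/1 weights follow the description above; the zero-background assumption and the function name are mine, not the exact code used):

.. code-block:: python

    # Hedged sketch of a foreground-weighted reconstruction loss (assumes
    # background voxels are exactly 0 after skull-stripping + MinMax scaling).
    import tensorflow as tf

    def weighted_reconstruction_loss(y_true, y_pred, bg_weight=0.5, fg_weight=1.0):
        weights = tf.where(tf.equal(y_true, 0.0), bg_weight, fg_weight)
        return tf.reduce_mean(weights * tf.square(y_true - y_pred))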
This weighting resulted in faster convergence, as shown by the red, blue & purple lines in the combined plot below. (The naming convention for each training curve is ``B<batch size>-<dataset>``, where CC=CC359, NFBS=NFBS, both=[NFBS,CC359].) + +.. image:: /_static/images/gsoc/2023/vara/vqvae3d-monai-training-plots.png + :alt: Combined training plots for all experiments + :width: 800 + +Inference results for the best performing model, B12-both, are shown below, where every two rows correspond to reconstructions & original volumes respectively, with equally spaced slices in each row. The slices visualized are anterior-posterior (top-down) & ventral-dorsal (left-right). + +.. image:: /_static/images/gsoc/2023/vara/vqvae-monai-B12-both.png + :alt: VQVAE-Monai-B12-both reconstructions & originals showing equally spaced 5 slices for 2 different test samples + :width: 800 + +Here's a similar visualization of the inference on the next best performing model, B12-CC. + +.. image:: /_static/images/gsoc/2023/vara/vqvae-monai-B12-CC.png + :alt: VQVAE-Monai-B12-CC reconstructions & originals showing equally spaced 5 slices for 2 different test samples + :width: 800 + +This shows that our training not only converged quickly but also improved visually. Here's a comparison of our current best performing model, VQVAE-Monai-B12-both, & the previous one on NFBS, VQVAE-Monai-B5-NFBS. The test reconstruction losses are 0.0013 & 0.0015 respectively. + +.. image:: /_static/images/gsoc/2023/vara/vqvae-reconstructions-comparison.png + :alt: VQVAE reconstruction comparison for B12-both & B5-NFBS + :width: 800 + +I also carried out Diffusion Model training on top of the best performing VQVAE-Monai-B12-both model for 300 & 500 diffusion steps, and the training curves obtained are as follows- + +.. image:: /_static/images/gsoc/2023/vara/dm3d-monai-training-curves.png + :alt: Diffusion Model training plots for 300 & 500 diffusion steps + :width: 800 + +These curves seemed to converge pretty quickly, but the sampling outputs of the generation pipeline are still pure noise, as shown below- + +.. image:: /_static/images/gsoc/2023/vara/dm3d-monai-B8-DM500.png + :alt: Pure-noise samples from the Diffusion Model generation pipeline + :width: 800 + + +Even the best performing KL encoders/VAEs/VQVAEs have been shown to deliver blurry reconstructions on medical datasets. Despite using a fairly low-complexity VQVAE model, with only ``num_res_channels=(32, 64)``, we consistently achieved improved reconstruction results with every experiment. From capturing only the outer structure of the brain with the VQVAE-B5-NFBS model to capturing the volumetric details of the inner microstructure of the brain with the VQVAE-Monai-B10-both model, we've stretched the capabilities of the VQVAE model. + + +For future work we should look into two things - debugging the Diffusion Model and scaling the VQVAE model. + +As a first priority, we could analyze the reason for the pure-noise output of the DM3D generations; this would help us rule out any implementation errors (although I already did that, a second pair of eyes would be helpful). MONAI's work shows that, even without stable convergence of their diffusion model, they achieved visualizations resembling a brain, whereas our model, despite a stable-looking training curve, produces generations that are not close to convincing. + +As a second step, we could also try scaling up both the VQVAE and the Diffusion Model in terms of complexity, for example by increasing the intermediate channel dimensions from 64 to 128 or 256 as done in MONAI's 3D LDM.
This may hopefully help us achieve the state of the art on the NFBS & CC359 datasets. + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Wrapping up documentation & the final report + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +Yes. I carried out debugging to understand the generation pipeline of the Diffusion Model, cross-checking the implementations of the posterior mean & variance in the code base against the respective formulas from the paper, as well as against MONAI's DDPM implementation. I didn't come across any error, yet the generated samples are still erroneous. + + + + + + + + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_22_Shilpi_Week13.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_22_Shilpi_Week13.rst.txt new file mode 100644 index 0000000..dd98454 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_22_Shilpi_Week13.rst.txt @@ -0,0 +1,26 @@ +Writing Tests & Making Documentation: Week 13 +============================================= + +.. post:: August 22, 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week, I finished writing tests for the sources of kurtosis. While the isotropic source only passed the test for the anisotropic DTD, the anisotropic source passed the tests for all DTDs. As a result, I integrated the test for the anisotropic source within the ``test_cti_fits`` function, eliminating the need for a separate function. +I also created tests for multi-voxel cases, but they passed only for single-voxel cases. One reason this might be happening is the way we're accessing the covariance and diffusion tensor elements; I intend to look further into this. +I also worked on real-life data, attempting to plot maps, but it didn't work out because the current kurtosis source implementations do not handle multi-voxel cases. Even though I was not able to get the desired result, I'm sure I'll figure it out with further research and possible collaboration. + +What is coming up next Week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- As soon as the mapping of the images is done, I'll finish the documentation of my model. +- I also intend to add references to some of the important methods. +- If everything is done early and time remains, I'll look into finishing the QTI simulation tutorial. +- I also intend to create a final work report of my project and submit it to the GSoC dashboard. + + diff --git a/dipy.org/pull/66/_sources/posts/2023/2023_08_28_Shilpi_Week14.rst.txt b/dipy.org/pull/66/_sources/posts/2023/2023_08_28_Shilpi_Week14.rst.txt new file mode 100644 index 0000000..7ee8df6 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2023/2023_08_28_Shilpi_Week14.rst.txt @@ -0,0 +1,28 @@ +Doing Final Touch-Ups: Week 14 +============================== + +.. post:: August 28, 2023 + :author: Shilpi Prasad + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week I fixed the test for the isotropic source of kurtosis, so it now works for all DTDs. I also created tests for the ``K_micro`` function. Initially, while running the test, I got some errors that made me look deeper into the actual function: I was taking the square root of some elements when I was actually supposed to square them, and I was using a '1/5' factor that was not required.
After fixing these issues, the overall map image of ``K_micro`` improved significantly. +Previously, the multi-voxel test case was failing due to different eigenvectors in the isotropic total diffusion tensor simulations. Removing the eigenvector assertion made the test pass, as verifying the kt, cvt, and evals values sufficed. +I also added documentation to some functions in the test file, such as ``_perpendicular_directions_temp_`` and ``from_qte_to_cti``. +I also had to rename some functions to make their names more descriptive of what they actually do. +This week I almost finished the CTI tutorial. The only thing remaining is to create a fetcher for the data so that all users can download and use it; currently, the path given for data retrieval points to my local system. +I also added some references and generally improved the wording and information in the tutorial. +I also finished writing my final work report, got it reviewed by my mentors, and then updated it. +Finally, before pushing the file onto the main PR, I cleaned up the code by removing all the extra comments and some unnecessary code, and made sure that the entire codebase follows the pep8 standard. + +What is coming up Next Week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Now that almost everything is completed and all the unit tests are passing, I'll focus on getting the PR merged. +I will also aim for a 100% coverage report, thereby ensuring that every line of code is thoroughly tested. +At present, the merging of my PR is on hold as we're actively seeking a pair of reviewers who can conduct a thorough examination of the entire codebase and provide their expert feedback. +Additionally, I'll try to get the QTI simulation merged as well. diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_01_10_serge_gsoc_announcement.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_01_10_serge_gsoc_announcement.rst.txt new file mode 100644 index 0000000..b83e150 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_01_10_serge_gsoc_announcement.rst.txt @@ -0,0 +1,150 @@ +Google Summer of Code 2024 +========================== + +.. post:: January 10 2024 + :author: Serge Koudoro + :tags: google + :category: gsoc announcement + +Introduction to DIPY +==================== + +DIPY is a free and open-source software library for the analysis of 3D/4D+ imaging in Python. It contains generic methods for spatial normalization, signal processing, machine learning, statistical analysis, and visualization of medical images. Additionally, it contains specialized methods for computational anatomy including diffusion, perfusion, and structural imaging. DIPY has many users from computational neuroanatomy and the medical data science field. DIPY is an international project which brings together scientists across labs and countries to share their state-of-the-art code and expertise in the same codebase, accelerating scientific research in medical imaging. DIPY is participating in GSoC this year for the 7th time. + +How to become a part of DIPY's Google Summer of Code 2024 +========================================================= + +GSoC is a program that allows students to learn by contributing to an open-source project while receiving a fellowship from Google, and mentorship from open-source software developers. For details about this year's GSoC, please refer to `this page `_. + +Before considering becoming part of the DIPY GSoC, please read about our expectations.
+ +All participants should have basic knowledge of scientific computing and development in Python. For a comprehensive introduction to these topics, please refer to the book `Effective Computation in Physics `_ by Katy Huff and Anthony Scopatz. However, you should already be familiar with data analysis using Python and NumPy before applying. + +Feel free to ask questions directly in our: + +- Gitter channel https://gitter.im/dipy/dipy +- Forum https://github.com/dipy/dipy/discussions + +Advice +------ + +Potential candidates should take a look at the guidelines on how to `contribute to DIPY `_. Making a small enhancement/bugfix/documentation fix/etc. to DIPY before applying for GSoC can help you get an idea of how things would work during the GSoC. The fix does not need to be related to your proposal. We have added, and will continue to add, beginner-friendly issues on GitHub. You can see some of them `here (beginner-friendly issues) `_ or all the issues `here (all issues) `_. + +Project Ideas +============= + +**Notice 1:** More project ideas might appear. **Stay tuned and check this page regularly!** + +**Notice 2:** We want to provide the best mentoring to our students, so **only 2 or 3 of these projects will be selected.** Not more! + +If you have any questions or if you want to contact a mentor: + +- `open a new discussion `_ with GSOC as a category. + +Project 1. Add mutual information for non-rigid registration +------------------------------------------------------------ + +**Difficulty:** Beginner + +**Mentors:** Sreekar Chigurupati, Serge Koudoro, Jong Sung Park (contact via github discussion) + +**Description:** + +The mutual information (MI) similarity metric is a metric for registering images that have different modalities or styles. Currently, the metric is available only for affine registration, not non-rigid registration. The student will need to implement, compare, and test MI for non-rigid registration. The existing implementations of different metrics in DIPY will provide a guideline, and an initial implementation separate from DIPY will be provided as well. This metric is expected to boost registration accuracy. This project will focus on multimodal images. If time permits, the student can also investigate a tensor-based metric for registration. + +**Time:** small (~90 hour projects) or medium (~175 hr projects) + +**Skills required:** Familiarity with registration algorithms, Python and some knowledge of Cython or C/C++. + +Project 2. CUDA non-rigid registration +-------------------------------------- + +**Difficulty:** Intermediate + +**Mentors:** Sreekar Chigurupati, Serge Koudoro, Jong Sung Park (contact via github discussion) + +**Description:** + +Non-rigid registration has more degrees of freedom than affine registration, and hence produces a more accurate registration output. However, this also means an increase in time complexity. In neuroimaging, it is always beneficial to have a faster preprocessing pipeline. The student will work on implementing the current non-rigid registration algorithm in DIPY using CUDA, as part of the GPU acceleration projects for various DIPY algorithms. If time permits, the student can also investigate accelerating different algorithms such as tracking or segmentation. + +**Time:** medium (~175 hr projects) or large (~350 hour) + +**Skills required:** Familiarity with registration algorithms, Python and CUDA + +Project 3.
DIPY algorithms Optimizations +---------------------------------------- + +**Difficulty:** Intermediate + +**Mentors:** Serge Koudoro, Jongsung Park (contact via github discussion) + +**Description:** + +The performance of our algorithms can easily be improved via algorithmic tricks and auto-vectorization. To realize this, the project will make sure that our current code is "auto-vectorization" friendly for any C compiler. An extra step would be to parallelize via threads and across CPUs. In addition, an extra GPU component can be added for a full-time project if the student feels comfortable and confident with this technology. For example, we currently have cudipy and GPU streamlines; a unified framework is required. In addition, we will need to parallelize some algorithms such as those used for probabilistic tractography and non-rigid registration. + +**Project Steps:** + + - Step 1: Look at pitfalls of our Registration framework and Denoising framework. + - Step 2: Unroll many loops and simplify several Cython functions. + - Step 3: Benchmark the performance improvements. + - Step 4: Implement multithreading/multiprocessing via OpenMP or MPI. + - Step 5 (optional): GPU study and improvement. Research new algorithmic tricks and implement them. + +**Time:** Full-time (350 hours) or Part-time (175 hours) + +**Skills Required:** Python, Cython, math, algorithms, optimization, CUDA, OpenMP, MPI, SIMD, SIMT + +Project 4. Solving Issues in DIPY_HORIZON +----------------------------------------- + +**Difficulty:** Beginner + +**Mentors:** Maharshi Gor, Serge Koudoro, Jong Sung Park (contact via github discussion) + +**Description:** + +Horizon is a highly efficient scientific visualization tool. While DIPY contributors have been upgrading it to accommodate user feedback and fix bugs, there are still several issues that need to be solved. This project requires the student to modify the Python code and work on these issues. It will be a great opportunity for them to get involved in an open-source project and possibly continue to work on such projects. + +**Time:** small (~90 hour projects) + +**Skills required:** Python + +Project 5. Project ideas using AI/ML in Diffusion MRI processing +---------------------------------------------------------------- + +**Difficulty:** Intermediate + +**Mentors:** Jong Sung Park, Sreekar Chigurupati, Serge Koudoro (contact via github discussion) + +**Description:** + +While there are many techniques that provide good results in diffusion MRI processing, in many cases an algorithm can be a time bottleneck in the pipeline due to complex equations and the resulting time complexity. This project is meant to support the research of students who are interested in an AI/ML project that uses diffusion MRI data. Once the project is concluded, and if the model is ready, further support will be provided for publishing and releasing open-source code. + +**Time:** medium (~175 hr projects) or large (~350 hour) + +**Skills Required:** Python, some knowledge of diffusion MRI, experience with AI/ML and the corresponding tools (e.g. Tensorflow, Pytorch, etc.) + +Project 6. Modernize DIPY Codebase +---------------------------------- + +**Difficulty:** Beginner + +**Mentors:** Serge Koudoro, Maharshi Gor, Jong Sung Park + +**Description:** + +The primary objective is to implement key improvements, including transitioning to keyword-only arguments for a more robust and readable codebase.
Additionally, the initiative aims to integrate lazy loading, enhancing the tool's efficiency by loading resources only when needed. This modernization effort reflects a commitment to maintainability, code clarity, and optimizing performance in DIPY, ensuring it remains at the forefront of scientific visualization tools in the Python ecosystem. Other potential tasks may include code refactoring, adopting best practices, and incorporating new features to further elevate DIPY's capabilities and user experience. + +**Project Steps:** + + - Step 1: Keyword-only arguments integration. + - Step 2: Lazy loading implementation. + - Step 3: Improve and simplify the management of the current website. + - Step 4: Improve Issues and Pull Requests triage + triage automation. + - Step 5: Integrate multiple GitHub Actions to simplify the workflows. + - Step 6: Refactor some DIPY packages + improve docstrings. + - Step 7: Add tutorials. + +**Time:** Part-time (175 hours) or full-time (350 hours) project + +**Skills Required:** Python, Sphinx. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_05_27_Inigo_week_0.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_05_27_Inigo_week_0.rst.txt new file mode 100644 index 0000000..111ca9b --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_05_27_Inigo_week_0.rst.txt @@ -0,0 +1,40 @@ +Community Bonding Period Summary and first impressions +====================================================== + +.. post:: May 27 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + +About Iñigo +~~~~~~~~~~~ +Hi everyone! I am Iñigo Tellaetxe Elorriaga, with a BSc in Biomedical Engineering and an MSc in Biomedical Technologies from Mondragon Unibertsitatea, Basque Country. I am a first-year PhD student in the Computational Neuroimaging Laboratory at the Biobizkaia Health Research Institute, also in the Basque Country. In the lab, our main paradigm is brain connectivity, so I am familiar with diffusion MRI and tractography. My main lines of research are brain aging, age modelling, and neurorehabilitation, all in the presence of neurodegenerative diseases and acute brain injuries. +As for my programming skills, I am mainly a Python developer and one of the main contributors to the `ageml `__ library, which we are developing at our lab as part of my PhD thesis. +I also worked in industry as a research engineer in the field of medical computer vision at Cyber Surgery, developing new methods to generate synthetic CT images from MRI using generative diffusion models, to reduce ionizing radiation in spinal surgery patients. +I have been using DIPY for a while now for my research and other projects, so I am obviously really excited to contribute to the project this summer. + +How did I get involved with DIPY +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +My `thesis supervisor `__, who was a professor during my master's and also a participant and mentor in other editions, told me about GSoC. As a person who has been naturally attracted to research and open science, I got really interested in open-source software. I was also lucky enough to meet `@drombas `__, who took part in GSoC 2021 with DIPY. He told me about his work and encouraged me to participate in DIPY, as he valued his experience positively. +After starting my PhD, I saw the perfect opportunity to contribute to the organization and potentially also to my research field. That is why I wanted to fuse tractography with age modelling in the context of Alzheimer's Disease.
+ +What I did this week and in the Community Bonding Period +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +During this period, I had the opportunity to meet the other GSoC participants in the organization and my mentors. It was a great way to learn how we should contribute to DIPY beyond our own projects and to get up and running with the environment, the coding style guidelines, and the community guidelines. + +Briefly, the objective of my project is to implement a new feature to generate synthetic tractograms in DIPY, being able to specify the "age" and the clinical status (healthy or Alzheimer's Disease affected) of the requested tractogram. + +I talked with my mentors and we agreed on the first tasks to carry out. Jon Haitz provided me with the data he used to train his AutoEncoder (AE) network. These are 2 datasets: Tractoinferno and FiberCup. +I forked the Tractolearn repo and translated his AE architecture from PyTorch to TensorFlow. I updated the Dockerfile in my fork and created a working Docker image of the repo to run experiments in the DIPC cluster when the experiments phase starts. +I also started writing the training loop for the AE; this is still a WIP. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +I will work on finishing the training loop for the AE and run an experiment in which I overfit the model with a small dataset. This is useful to check whether the loss and the training objective are correctly defined. +After this, I could continue to launch bigger training sessions with the full dataset. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +I got a bit stuck with reproducing the Tractolearn environment locally and making the Dockerfile work, but I solved it by updating the Dockerfile. The local environment problem was solved by installing a missing system dependency, `liblapack-dev`. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_05_27_kaustav_week0.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_05_27_kaustav_week0.rst.txt new file mode 100644 index 0000000..ca77f51 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_05_27_kaustav_week0.rst.txt @@ -0,0 +1,66 @@ +My Journey Begins: Community Bonding Period with DIPY +===================================================== + +.. post:: May 27 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone, +I am thrilled to share that I have been selected as a Google Summer of Code (GSoC) student for 2024. +Over the summer, I will be working with DIPY, and I am incredibly excited about the journey ahead. + +Community Bonding Period: Building the Foundation +-------------------------------------------------- + +The GSoC program is structured in a way that allows us to gradually ease into our projects. The first phase, known as the Community Bonding period, is crucial for laying a strong foundation for the work that lies ahead. During this time, I had the opportunity to: + +1. **Get Acquainted with the Community**: I joined various communication channels like mailing lists, chat rooms, and forums. Interacting with the mentors and other community members helped me understand the culture and workflow of the organization. + +2. **Understand the Project Scope**: I spent time understanding the broader goals of my project, the expected outcomes, and the milestones I need to achieve. This involved several discussions with my mentor, Serge, who provided invaluable guidance and insights. + +3.
**Set Up the Development Environment**: Getting the right tools and environment set up is critical for any development work. I followed the documentation to set up my development environment, ensuring I had all the necessary dependencies and tools installed. + +Project-Task 1: Enhancing Keyword-Only Arguments with Decorators +---------------------------------------------------------------- + +The focus of my 1st task is to implement keyword-only arguments in DIPY. Specifically, I am working on enhancing this feature with the help of decorators. + +What are Keyword-Only Arguments? +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In many programming languages, keyword-only arguments are parameters that can only be supplied using their name, not as positional arguments. This helps in making the code more readable and reduces the chances of errors. + +The Role of Decorators +~~~~~~~~~~~~~~~~~~~~~~ + +Decorators are a powerful feature in Python that allow us to modify the behavior of functions or classes. By leveraging decorators, we can enforce the use of keyword-only arguments in a clean and efficient manner. + +Progress So Far +--------------- + +During the Community Bonding period, I have made significant strides in understanding the current implementation and identifying areas for improvement. Here’s a brief overview of what I have accomplished: + +1. **Research and Analysis**: I reviewed the existing codebase to understand how keyword-only arguments are currently handled. This involved reading through the documentation and studying the source code. + +2. **Week 0**: With the guidance of my mentor, I implemented a decorator that enforces keyword-only arguments. I have submitted a PR for review on this. The PR includes the decorator feature and relevant tests; it is also applied to functions in the stats module. + +3. **Pull Request**: [https://github.com/dipy/dipy/pull/3239] + +What’s Next? +------------ + +As we transition from the Community Bonding period to the Coding phase, I am eager to dive deeper into the project. My next steps for this task include: + +- **Writing Tests**: To ensure the reliability of the new feature, I will write more tests and integrate them into the existing test suite. +- **Documentation**: Good documentation is key to the success of any project. I will update the documentation to reflect the new changes and provide examples of how to use the new feature. +- **Fixing style**: I will fix the code styling in accordance with PEP8. + +Final Thoughts +-------------- + +The Community Bonding period has been an enriching experience, allowing me to connect with my mentors and the community. I am grateful for the support and guidance I have received so far and am looking forward to contributing to DIPY over the summer. + +Stay tuned for more updates as I continue this exciting journey! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_05_31_Inigo_week_1.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_05_31_Inigo_week_1.rst.txt new file mode 100644 index 0000000..f2d1ef4 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_05_31_Inigo_week_1.rst.txt @@ -0,0 +1,24 @@ +First Week into GSoC 2024: Building the AutoEncoder, writing the training loop +============================================================================== + +..
post:: May 31 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ +I finished becoming familiar with the TensorFlow + Keras basics and I wrote the training loop and a couple of scripts for instantiating and training the AutoEncoder. +Data loading was also addressed and I am able to load the data from the FiberCup dataset in .trk format using `NiBabel `_, transform it into NumPy arrays, and feed it into the network. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Because the training loop is taking too long, I will refactor the code to make it more modular and more in the TensorFlow style. Also, other DIPY models are implemented in this fashion, which will contribute to consistency across the library. +I think I have yet to get the hang of the TensorFlow way of doing things. +I plan to use a class for the ``Encoder`` and another one for the ``Decoder``. Then I will bring them together under an ``AutoEncoder`` class that inherits from the Keras ``Model`` class. +This will allow me to use the ``fit`` method from the Keras ``Model`` class and make the training loop more efficient, together with easing its usage. I will just pass all the relevant training parameters to the ``compile`` method and later call the ``fit`` method to train the model, which takes care of the weight updates more efficiently than my handmade training loop. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +Getting the handmade training loop up and running gave me a couple of headaches because the weight update was taking ages. I am still stuck here and that is why I will refactor the code next week. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_03_kaustav_week_1.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_03_kaustav_week_1.rst.txt new file mode 100644 index 0000000..75b42fc --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_03_kaustav_week_1.rst.txt @@ -0,0 +1,49 @@ +My Journey Continues: Week 1 Progress with DIPY +=============================================== + +.. post:: June 3 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone, +I am back with another update on my Google Summer of Code (GSoC) journey with DIPY. The Community Bonding period has come to an end, and I am now fully immersed in the Coding phase of the project. + +Progress So Far +--------------- + +Last week I updated my PR with the required functionality for it to be ready to be integrated. I was having trouble with the CI (Continuous Integration) pipeline on GitHub, so I reached out to the whole GSoC team and received support immediately. I am really grateful to `Iñigo Tellaetxe `__ and my mentor `Serge Koudoro `__ for providing me with the necessary insights to fix the issue with CI. +I also put up another very small PR to fix the codespell issues happening in the CI. + +Expanding the Decorator Implementation +-------------------------------------- + +Building upon the foundation laid during the Community Bonding period, I have been looking into expanding the decorator implementation across more modules in DIPY. +I have gone through the required functions and tests, and I will plan with my mentor which modules to change next. +I will write comprehensive tests for this part as well and update the necessary documentation (a small illustrative sketch of the idea follows below).
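To give an idea of the general pattern (this is an illustrative sketch of the technique, not DIPY's actual decorator), such a decorator can warn whenever arguments that should be keyword-only are still passed positionally:

.. code-block:: python

    # Illustrative sketch of a keyword-only-enforcing decorator (hypothetical
    # names; not the implementation submitted in the DIPY PR).
    import functools
    import warnings

    def warn_on_positional(max_positional=1):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                if len(args) > max_positional:
                    warnings.warn(
                        f"{func.__name__}: arguments beyond the first "
                        f"{max_positional} should be passed as keywords.",
                        UserWarning, stacklevel=2)
                return func(*args, **kwargs)
            return wrapper
        return decorator

    @warn_on_positional(max_positional=1)
    def resample(data, affine=None, order=1):
        return data

    resample([1, 2, 3], None)           # warns about positional use
    resample([1, 2, 3], affine=None)    # does not warn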
+ +Refining Decorator Implementation +--------------------------------- + +I will continue to refine the decorator implementation based on feedback from the community and my mentor. This includes fixing any bugs, improving performance, and enhancing the documentation further. + +Lazy Loading Integration +------------------------ + +In addition to expanding the decorator implementation, I have begun research for Task 2, which involves integrating lazy loading into DIPY. Lazy loading is a design pattern that can help improve the efficiency and performance of the codebase by delaying the initialization of objects until they are actually needed. +I will begin this task when my mentor advises me to. + +Community Engagement +-------------------- + +Staying engaged with the DIPY community is crucial. I will try to participate in discussions, seek feedback, and collaborate with other contributors to ensure the success of the project. + +Final Thoughts +-------------- + +The first week of the Coding phase has been incredibly productive, and I am excited about the progress I have made so far. The support and guidance from my mentor and the community have been invaluable, and I am looking forward to the challenges and opportunities that lie ahead. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_07_Inigo_week_2.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_07_Inigo_week_2.rst.txt new file mode 100644 index 0000000..dd5f0ac --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_07_Inigo_week_2.rst.txt @@ -0,0 +1,29 @@ +Second Week into GSoC 2024: Refactoring the AutoEncoder, preliminary results +============================================================================ + +.. post:: June 7 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ +This week I refactored the AutoEncoder code to match the design patterns and the organization of other Deep Learning models in the DIPY repo, and to make the training loop more efficient and easier to use. I transferred my code to a `separate repo `_ to keep the DIPY repo clean and to experiment freely. Once the final product is working, I will merge it into DIPY. I also packaged the whole repo so I can use it as a library. +Training experiments were run for a maximum of 150 epochs, with variable results. They are not amazing, but at least we get some reconstruction of the input tracts from FiberCup, which seems to be on the right track. I also implemented training logs that report the parameters I used for training, so I can reproduce the results at any time. This still needs work though, because not all parameters are stored. Need to polish! +The left image shows the input tracts, and the middle and right images show two reconstructions from two different training experiments. + +.. image:: /_static/images/gsoc/2024/inigo/fibercup_preliminary_results.png + :alt: Preliminary results of the AutoEncoder training for a subset of plausible fibers of the FiberCup dataset. + :width: 600 + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +With the help of my mentors, we identified possible improvements to the AutoEncoder training process. Yesterday I investigated how PyTorch weights are initialized in convolutional kernels and in Keras Dense layers using the `He Initialization `_.
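As a hedged illustration (not the exact code I ended up writing), approximating PyTorch's Kaiming/He-style defaults in Keras could look like this; the ``scale=1/3`` value is my assumption for matching PyTorch's default uniform bound:

.. code-block:: python

    # Hedged sketch: He-style initializers in Keras. PyTorch's default for
    # Conv/Linear layers is a Kaiming-uniform variant; VarianceScaling lets
    # you tune the scale if a closer match is needed (scale=1/3 is assumed).
    from tensorflow.keras import layers, initializers

    he_uniform = initializers.HeUniform(seed=42)
    torch_like = initializers.VarianceScaling(scale=1.0 / 3.0, mode="fan_in",
                                              distribution="uniform", seed=42)

    conv = layers.Conv1D(32, kernel_size=3, padding="same",
                         kernel_initializer=torch_like)
    dense = layers.Dense(64, kernel_initializer=he_uniform)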
I started implementing custom initializers to mimic the same behavior in TensorFlow yesterday. +This week I should focus on trying to reproduce the small implementation differences that might be causing the model not to converge like the PyTorch one does. I will also try to finish implementing the He Initialization in TensorFlow. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +I got a bit stuck refactoring the code to match the DIPY design patterns and also with the TensorFlow implementation itself, because the output shape of the ``Encoder`` and the input shape of the ``Decoder`` were not matching. +After investigating what caused this issue, I discovered that ``tf.shape`` was not giving me the usual (and expected) shape of the tensor, conveniently stored in a ``tuple``. I found this behavior strange, but I solved the problem by simply accessing the ``.shape`` attribute of the Tensor, which does give me the shape tuple I needed. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_12_kaustav_week_2.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_12_kaustav_week_2.rst.txt new file mode 100644 index 0000000..5c71311 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_12_kaustav_week_2.rst.txt @@ -0,0 +1,56 @@ +My Journey Continues: Week 2 Progress with DIPY +=============================================== + +.. post:: June 10 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Greetings, everyone! +It's time for another update on my Google Summer of Code (GSoC) journey with DIPY. The second week of the Coding phase has been equally productive and exciting, with significant advancements in both tasks. + +Decorator Function Refinement +----------------------------- + +Building upon the initial implementation from the previous week, I have invested significant effort in refining and correcting the decorator function. +With the guidance of my mentor, `Serge Koudoro `__, I have meticulously reviewed the code, identified areas for improvement, and made the necessary changes to enhance its performance, readability, and maintainability. + +Comprehensive Testing and Bug Fixing +------------------------------------ + +Alongside the refinement process, I have written comprehensive tests to ensure the decorator function's robustness and reliability. +These tests have been instrumental in identifying and resolving bugs, allowing me to thoroughly validate the decorator's functionality across various scenarios and cases. + + +Difficulties & Challenges +------------------------- + +I didn't face any particular difficulty with my task this week, as it was pretty straightforward. I just had concerns about how the versioning would work out, but I made proper test case scenarios for that as well. + +Documentation Updates +--------------------- + +To ensure the long-term maintainability and usability of the decorator, I have updated the necessary documentation following the NumPy docstring standard. +This includes clear explanations of the decorator's functionality, guidelines for its usage, and examples to assist other contributors and users in leveraging its capabilities effectively. + +Lazy Loading Research +--------------------- + +In addition to decorators, I looked at and tested implementations of lazy loading in a few libraries: +1. Scipy +2. Numpy +3. Matplotlib + +Next Week +--------- + +The plan for next week is to apply the decorator to all modules in DIPY and put up PRs. My main PR at the moment is https://github.com/dipy/dipy/pull/3239 .
+ +Final Thoughts +-------------- + +The second week of the Coding phase has been dedicated to refining and correcting the decorator function, laying a solid foundation for its seamless integration into DIPY's codebase. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_14_Inigo_week_3.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_14_Inigo_week_3.rst.txt new file mode 100644 index 0000000..351d9ad --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_14_Inigo_week_3.rst.txt @@ -0,0 +1,35 @@ +Third Week into GSoC 2024: Replicating training parameters, approaching replication +=================================================================================== + +.. post:: June 14 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ +This week was slightly less productive because I was really busy with my PhD tasks, but I managed to progress nevertheless. +After implementing custom weight initializers (with He Initialization) for the ``Dense`` and ``Conv1D`` layers in the AutoEncoder (AE), I launched some experiments to try to replicate the training process of the original model. +This yielded better results than last week, this time setting the weight decay, the learning rate, and the latent space dimensionality as shown in the `FINTA paper `_. +Now the AE has no problem learning that the bundles have depth, and the number of broken streamlines decreased a lot compared to the previous results. +I also worked on trying to monitor the training experiments using TensorBoard, but I did not succeed because it was a last-minute idea and I did not have time to implement it properly. + +.. image:: /_static/images/gsoc/2024/inigo/fibercup_better_results.png + :alt: Preliminary results of the AutoEncoder training for a subset of plausible fibers of the FiberCup dataset, approaching better replication compared to the PyTorch model. + :width: 600 + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +My mentors and I agreed on trying to transfer the weights of the pre-trained PyTorch model to my Keras implementation, because it may take less time than actually training the model. Thus, the strategy we devised for this to work is the following: +1. Implement dataset loading using HDF5 files, as the original model uses them, and the TractoInferno dataset is contained in such files (it is approximately 75 GB). A short loading sketch follows this list. +2. Launch the training in Keras on the Donostia International Physics Center (DIPC) cluster, which has GPU-accelerated nodes that I can use for speeding up training. Unlike PyTorch, I don't need to adjust the code for GPU usage, as TF takes care of that. +3. While the previous step is running, I will work on transferring the weights from the PyTorch format to the Keras model. This will be a bit tricky, but my mentor Jong Sung gave me a code snippet that was used in the past for this purpose, so I will try to adapt it to my needs. +4. In parallel, I will try to read about the streamline sampling and filtering strategy Jon Haitz used for `GESTA `_ and FINTA, respectively, to implement them in DIPY. I think the code is hosted in the TractoLearn repository, but I need to look it up.
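As a short sketch of what step 1 could look like (the HDF5 layout and key name are assumptions, not the actual TractoInferno structure):

.. code-block:: python

    # Minimal h5py sketch for HDF5 dataset loading (key name is hypothetical).
    import h5py
    import numpy as np

    def load_streamlines(h5_path, key="streamlines"):
        # Only the requested dataset is read into memory.
        with h5py.File(h5_path, "r") as f:
            return np.asarray(f[key])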
+ +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ +It was not easy to implement the custom weight initializers for the Keras layers, because the He initialization is not described in the Keras documentation the same way it is in the PyTorch documentation, so I had to combine information from both. +Otherwise, I did not get stuck this week, but I am a bit worried about the weight transfer process, as it may be a bit tricky to implement. + +Until next week! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_19_kaustav_week_3.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_19_kaustav_week_3.rst.txt new file mode 100644 index 0000000..80cfdaf --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_19_kaustav_week_3.rst.txt @@ -0,0 +1,58 @@ +My Journey Continues: Week 3 Progress with DIPY +=============================================== + +.. post:: June 17 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Greetings, everyone! +The third week of the Coding phase has been a whirlwind of progress. I have achieved significant milestones in both the decorator implementation and lazy loading integration tasks, bringing us closer to enhancing DIPY's performance and efficiency. + +Decorator Implementation across Modules +--------------------------------------- + +With the refinements made in the previous week, I am thrilled to announce that I have successfully implemented the decorator function across 15 modules in DIPY. +This milestone represents a major step forward in enhancing the codebase's efficiency and maintainability. + +Difficulties & Challenges +------------------------- + +When I was trying to apply the decorator to multiple modules, I tried to do it through a script so that I could apply the decorator to all the necessary functions in one shot. +The script would first parse the selected file, then apply the decorator to the necessary functions, and lastly unparse and reconstruct the file. I was using libraries that comply with PEP-8 and ruff formatting. +I faced two problems here: +1. After reconstruction, the comments in the file were removed. (This happened due to how the libraries worked.) +2. The libraries were not able to unparse the docstrings correctly and would give errors. + +After a long stretch of trial and error, I decided to apply the decorators manually to all the modules. + +Testing and Continuous Integration +---------------------------------- + +While the decorator implementation has been completed, there is still some testing remaining to ensure the robustness and reliability of the integrated code. +Additionally, I have submitted a pull request (PR) for the decorator implementation, and I am currently working on addressing any issues or concerns raised by the Continuous Integration (CI) pipeline. +Ensuring a smooth CI process is crucial for maintaining the quality and integrity of the codebase. + +Lazy Loading Demonstration +-------------------------- + +In parallel to the decorator implementation, I have also made progress on the lazy loading integration task. +As a proof of concept, I have demonstrated a simple implementation of lazy loading in the align module of DIPY. +This initial implementation showcases the potential benefits of lazy loading, such as improved memory efficiency and performance optimization. +By delaying imports and object initialization until they are actually needed, lazy loading can significantly reduce the memory footprint of the application and enhance overall efficiency.
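For illustration, a proof of concept like this typically relies on the deferred-import pattern enabled by PEP 562 (a module-level ``__getattr__``). The sketch below is generic and hypothetical, not the actual DIPY code; the module names are just examples:

.. code-block:: python

    # __init__.py of a hypothetical package that defers heavy submodule imports
    import importlib

    _lazy_submodules = {"align", "reconst"}  # illustrative names


    def __getattr__(name):
        # Only called when normal attribute lookup fails (PEP 562), so the
        # submodule is imported the first time it is actually accessed.
        if name in _lazy_submodules:
            module = importlib.import_module(f"{__name__}.{name}")
            globals()[name] = module  # cache it so __getattr__ is not hit again
            return module
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


    def __dir__():
        return sorted(set(globals()) | _lazy_submodules)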
+ +Next Week +--------- + +The plan for next week is to work on lazy loading, implement it in DIPY, and share a test branch with my mentor. +I will also look into fixing the existing PRs where I have implemented the decorator in sub-modules. + +Final Thoughts +-------------- + +The third week of the Coding phase has marked a significant milestone, with the decorator implementation across multiple modules and the demonstration of lazy loading capabilities. +I am grateful for the guidance and support provided by my mentor `Serge Koudoro `__, and the DIPY community, which have been instrumental in driving this project forward. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_21_Inigo_week_4.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_21_Inigo_week_4.rst.txt new file mode 100644 index 0000000..dc55176 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_21_Inigo_week_4.rst.txt @@ -0,0 +1,57 @@ +Week 4 into GSoC 2024: Weight transfer experiments, hardships, and results! +=========================================================================== + + +.. post:: June 21 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Well, this week was really intense. I spent most of the time trying to transfer the weights of the PyTorch model pre-trained on the TractoInferno dataset to the Keras model. +I must say that thanks to the reduced size of the AutoEncoder, it was feasible to do it layer by layer without going crazy. + +PyTorch uses a *channels first* convention, whereas TensorFlow uses *channels last*, which means that all the weights in the convolutional layers had to be transposed. +This was the easiest part, as it was just a matter of using ``np.transpose``. + +After addressing the convolutional layers, I set a common input for both networks and compared their outputs. As expected, they were not the same, and not even close. +Thus, matching the behavior of the PyTorch model with the Keras implementation became my objective. To achieve this, I ran a common input through all the layers of both models sequentially and systematically compared the outputs of each layer. +In the Encoder block, I found all the outputs to be within a reasonable range of each other (MAE = 1e-6), except for the last two operations, which flatten the output of the 1D convolutional layers and then feed it to a fully connected layer. +This was partially good news, because most of the Encoder was behaving as desired, but the most challenging part was adapting the flattening and reshaping operations happening in the Encoder and the Decoder, respectively. +As the Keras 1D convolutional output dimensions do not follow the same ordering as in PyTorch (*[n, m, channels]* vs *[n, channels, m]*), the flattening behavior of the two models was different (the elements followed a different ordering when being concatenated into a 1D array), and thus the fully connected layer of the Encoder (named ``fc1``) was receiving different inputs. +To solve this, I first reshaped the output of the Keras 1D convolutional layer to match the PyTorch *channels first* convention, and then applied the flattening. +This effectively resulted in a within-reasonable-error (MAE = 1e-6) output of the Encoder block. Problem solved!
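As a minimal sketch of the two tricks described above (transposing the convolutional weights and flattening in PyTorch's channel ordering), with made-up tensor shapes standing in for the real layers:

.. code-block:: python

    import numpy as np
    import tensorflow as tf

    # PyTorch Conv1d weights are (out_channels, in_channels, kernel_size);
    # Keras Conv1D kernels are (kernel_size, in_channels, out_channels).
    pt_conv_weight = np.random.randn(32, 16, 3).astype("float32")  # stand-in torch weight
    keras_conv_kernel = np.transpose(pt_conv_weight, (2, 1, 0))

    # PyTorch Linear weights are (out_features, in_features);
    # Keras Dense kernels are (in_features, out_features).
    pt_linear_weight = np.random.randn(64, 3200).astype("float32")
    keras_dense_kernel = np.transpose(pt_linear_weight, (1, 0))

    # Flatten in PyTorch order: move the Keras output back to channels-first
    # before flattening, so the fully connected layer sees its inputs in the
    # same order as in PyTorch.
    conv_out = tf.random.normal((8, 100, 32))            # Keras shape: (n, m, channels)
    channels_first = tf.transpose(conv_out, (0, 2, 1))   # PyTorch shape: (n, channels, m)
    flat = tf.keras.layers.Flatten()(channels_first)     # shape: (n, channels * m)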
The Decoder block was a bit more challenging, because the PyTorch implementation was using linear interpolation in its ``torch.nn.Upsample`` layers. +For this, I had to implement a custom layer in Keras that performs the same operation, as linear interpolation is unavailable in the ``tf.keras.layers.UpSampling1D`` layers. I made this workaround using the ``tf.image.resize`` function, tricking it into treating a modified 1D tensor as a pseudo 2D tensor. +The errors in the Decoder block are higher than in the Encoder, but we assumed that an MAE of around 1e-3 is acceptable. + +On the other hand, I started implementing the dataset loading using HDF5 files, but I set that aside because it is not a priority. + +Finally, my mentor `Jon Haitz `_ kindly provided me with the weights of the PyTorch AE he trained on the FiberCup dataset, and he suggested an experiment consisting of encoding the FiberCup tractogram with my Keras model and decoding it with the PyTorch model, to see if the Encoder works properly. This was indeed the case: the PyTorch model effectively reconstructed the tractogram from the Keras-encoded latent vectors. Unfortunately, decoding with the Keras model did not give the same result, which naturally suggests that the Keras Decoder implementation is still not similar enough to the PyTorch one, so there is still room for improvement. Despite not being fully successful, this experiment was very enlightening, and it gave me a lot of insight into the differences between the two implementations. + +In a last effort to replicate the PyTorch model results, I went on to train my Keras architecture on the FiberCup dataset with the same parameters my mentor used in his `GESTA `_ paper, to see if the results I get are similar to the ones he got. +This produced amazing results, as you can check visually in the figure below. Note that neither model was able to capture the depth dimension of the streamlines, but this is not concerning. It can be solved by reducing the latent dimension size to 16 (it is 32 now). + +.. image:: /_static/images/gsoc/2024/inigo/fibercup_replicated.png + :alt: Left: Source data to encode/decode. Middle: Keras model reconstruction. Right: PyTorch model reconstruction. + :width: 600 + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next week we will start working on a conditional version of the AutoEncoder, which should give us the ability to generate tractograms conditioned on a specific scalar input. This will be a very interesting feature to have because we could get tractograms with properties of interest, which is the main goal of this project. +We decided to focus on developing a conditional version of the AE over adding the latent space sampling because the code for the latter is already available in the `tractolearn `_ repository, so we can postpone it for now. + +
+Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +Of course I got stuck, but as the project has an exploratory and research-oriented nature, I would not really call it being 'stuck'. Things got hard at some points, but we found ways to solve them. +I am very happy with the progress we are making and I am also very excited to see where we can get with the conditional AutoEncoder. + +Until next week!
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_28_Inigo_week_5.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_28_Inigo_week_5.rst.txt new file mode 100644 index 0000000..52cf075 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_28_Inigo_week_5.rst.txt @@ -0,0 +1,33 @@ +Week 5 into GSoC 2024: Vacation, starting with the conditional AutoEncoder +========================================================================== + + +.. post:: June 28 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + + + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Hi everyone! This week I have been on vacation, so I have not been able to work on the project as much as the previous weeks. However, I have been thinking about the next steps to take and I have decided to start with the conditional AutoEncoder. I have been reading some papers and I have found some interesting ideas that would be nice to implement. + +While stuck at the Munich airport, I started to write some code for this (the weather was not very good and my flight was delayed, so I lost my connecting flight and I had to sleep at the airport). I found an implementation of a regression variational AutoEncoder `in this paper `_, where the authors implement a way to manipulate the latent space so that the input data that get projected (streamlines for our case, 3D image patches in the paper) into it are organized along a desired scalar parameter. + +I thought this could be a good starting point for my conditional AutoEncoder because it basically provides a way to sample from the latent space in a controlled manner, where you can select the age of the streamlines you want to generate. Also, having the variational regularizes the latent space, so our model is more resilient against overfitting against the identity function, which might happen in "vanilla" AutoEncoders without any regularization. + +Also, they provided their code in TensorFlow, so I started adapting it to our use case, which uses 1D convolutions instead of 3D ones. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +I will continue working on the conditional AutoEncoder next week, but you can see the progress `here `_. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +The only place I got stuck in was the airport, but thankfully I managed to arrive to my destination, even if my baggage was lost (they delivered it 2 days later, thankfully nothing was missing!) + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_06_30_kaustav_week_4.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_06_30_kaustav_week_4.rst.txt new file mode 100644 index 0000000..a924456 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_06_30_kaustav_week_4.rst.txt @@ -0,0 +1,45 @@ +My Journey Continues: Week 4 Progress with DIPY +=============================================== + +.. post:: June 23 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone, +I hope this update finds you well. The fourth week of GSOC has made a little slow progress. + +Decorator Implementation PR Fix +------------------------------- + +A significant portion of this week was dedicated to addressing and resolving issues with the tests for our decorator implementations. My primary goal was to eliminate any warnings in the Continuous Integration (CI) pipeline, as these warnings are treated as errors and prevent the acceptance of our changes. 
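As a hedged, generic illustration (not the actual DIPY test code) of the kind of adjustment this usually involves: when CI treats warnings as errors, tests that intentionally trigger a warning need to assert it explicitly, for example with ``pytest.warns``:

.. code-block:: python

    import warnings

    import pytest


    def legacy_call():
        # Stand-in for a function that emits a deprecation-style warning
        warnings.warn("positional arguments are deprecated", UserWarning, stacklevel=2)
        return 42


    def test_legacy_call_warns():
        # Asserting the expected warning keeps the test green even when the
        # CI configuration turns warnings into errors.
        with pytest.warns(UserWarning, match="deprecated"):
            assert legacy_call() == 42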
+This process involved carefully reviewing each test, identifying the source of warnings, and making necessary adjustments to ensure our code passes all checks without raising any flags. + +Difficulties & Challenges +------------------------- + +No specific challenges faced this week. + +Peer Review Contributions +-------------------------- + +In addition to working on our own implementations, I've had the opportunity to review Pull Requests (PRs) from my peer. +This has been an enriching experience, allowing me to gain insights into different coding approaches and problem-solving techniques. + +PR: https://github.com/fury-gl/fury/pull/888 + +Blog Review +----------- + +I also reviewed blogs of Peers. +1. https://github.com/fury-gl/fury/pull/896 - `Robin Roy `__ +2. https://github.com/fury-gl/fury/pull/897 - `Wachiou BOURAÏMA `__ + +Next Week +--------- + +Next week I am planning to submit my Lazy Loading PR. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_01_kaustav_week_5.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_01_kaustav_week_5.rst.txt new file mode 100644 index 0000000..5c2e032 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_01_kaustav_week_5.rst.txt @@ -0,0 +1,40 @@ +My Journey Continues: Week 5 Progress with DIPY +=============================================== + +.. post:: July 01 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone, +I hope this update finds you well. The fifth week of my Google Summer of Code (GSoC) journey with DIPY has been a bit different from the previous ones, +and I wanted to share an honest update about my progress and plans. + +A Week of Unexpected Challenges +------------------------------- + +This past week, I faced an unexpected setback as I fell ill. As a result, I wasn't able to make as much progress on the project as I had initially planned. +I'm grateful for the understanding and support from my mentor, `Serge Koudoro `__, and my fellow GSOC team on the project. + +Preparing for Lazy Loading Implementation +----------------------------------------- + +Whatever time I could get, I used it to prepare for the upcoming task of implementing lazy loading in DIPY. +I've been reviewing the research I conducted earlier and planning my approach for integrating lazy loading into various modules. + +Next Steps +---------- + +As I recover and regain my full productivity, I'm excited to dive into the lazy loading implementation. My goals include: + +- Identifying key modules and functions that would benefit most from lazy loading. +- Developing a strategy for implementing lazy loading without disrupting existing functionality. +- Beginning the actual implementation process, by implementing for Sub-Modules of DIPY and sharing a build with my mentor for POC. +- Preparing comprehensive tests to ensure the lazy loading implementation works as expected and doesn't introduce any bugs. + +Final Thoughts +-------------- + +While this week didn't see as much tangible progress as I would have liked, it has been an opportunity for reflection and preparation. +I'm grateful for the support of my mentor and excited to return to full speed at the earliest. +Thank you for your understanding and continued support. Stay tuned for what I hope will be a more code-filled update next week! 
diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_06_Inigo_week_6.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_06_Inigo_week_6.rst.txt new file mode 100644 index 0000000..b92d6d3 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_06_Inigo_week_6.rst.txt @@ -0,0 +1,46 @@ +Week 6 into GSoC 2024: Stuck with the Variational AutoEncoder, problems with Keras +================================================================================== + +.. post:: July 06 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week was all about the Variational AutoEncoder. My mentors advised me to drop the TensorFlow implementation of the regression VAE I found last week and instead directly integrate the variational and conditional characteristics into my AE implementation, following a more modular approach. This was a good decision, as adapting third party code to one's needs is often a bit of a mess (it had already started being a mess, so yeah). Also, once the variational part is done, implementing the conditional part should not be that hard. + +To provide a bit of intuition behind the VAE, let me first illustrate the "vanilla" AE. This is a neural network that compresses the input data into a reduced representation, and then tries to reconstruct the original data from this representation. We refer to the place where the compressed data representations live as the "latent space". So, once the AE is good at compressing data to the latent space, we could take a sample from it and generate new data. + +The objective of the AE is to minimize the difference between the input data and the generated data, also called the "reconstruction loss". + +.. image:: /_static/images/gsoc/2024/inigo/inigo_vanilla_autoencoder.png + :alt: Vanilla AutoEncoder diagram. + :width: 600 + + +Usually the problem with AEs is that the latent space is full of "holes", i.e. it is a set of isolated points, meaning that if a sample is taken between two already encoded samples, the generated data will not be an interpolation between the two samples, but a random sample. To solve this, the VAE seeks to "regularize" the latent space, encoding/compressing the input data into distributions that live in it, instead of single points. This way, the latent space is continuous, and the interpolation between two samples is meaningful and more likely to follow the data distribution. + +The VAE does this by adding a "regularization loss" to the AE, which is the Kullback-Leibler divergence between the latent space distribution and a prior distribution (usually a normal distribution). If you are curious, you can find a nice explanation of the differences between AEs and VAEs `here `_. + +.. image:: /_static/images/gsoc/2024/inigo/inigo_variational_autoencoder.png + :alt: Variational AutoEncoder diagram. + :width: 600 + +Thus, I started by implementing the VAE in Keras, and I must say that it was not as easy as I thought it would be. I used `this VAE example from Keras `_, adding a ``ReparametrizationTrickSampling`` layer between the ``Encoder`` and the ``Decoder``. + +The main problem was that the Keras implementation was not behaving as expected because it was giving me problems with model initialization, so I encapsulated the ``Encoder`` and the ``Decoder`` parts into individual ``keras.Model`` class instances and put them together under a wrapping Model. Doing this was a bit painful, but it worked.
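For context, here is a minimal sketch of what such a reparametrization trick sampling layer typically looks like, loosely following the Keras VAE example linked above (this is not my exact implementation):

.. code-block:: python

    import tensorflow as tf
    from tensorflow import keras


    class ReparametrizationTrickSampling(keras.layers.Layer):
        """Sample z = mean + exp(0.5 * log_var) * epsilon, with epsilon ~ N(0, I)."""

        def call(self, inputs):
            z_mean, z_log_var = inputs
            # Draw noise with the same shape as the latent mean
            epsilon = tf.random.normal(shape=tf.shape(z_mean))
            return z_mean + tf.exp(0.5 * z_log_var) * epsilon


    # Usage inside a model: z = ReparametrizationTrickSampling()([z_mean, z_log_var])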
+The problem was that I was not able to train the model because the loss was constantly ``nan``. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next week I must fix the ``nan`` problem during training, although I am not yet sure what is causing it. I am using exponential operations in the KL loss computation and in the ``ReparametrizationTrickSampling`` layer, which could return excessively large values if the exponent is too big, leading to an exploding gradients issue. I will explore this. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I got stuck instantiating the model in the Keras implementation of the VAE, but I think I am on the right track now. + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_07_kaustav_week_6.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_07_kaustav_week_6.rst.txt new file mode 100644 index 0000000..0cabf0c --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_07_kaustav_week_6.rst.txt @@ -0,0 +1,50 @@ +My Journey Continues: Week 6 Progress with DIPY +=============================================== + +.. post:: July 07 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Greetings, everyone! +The sixth week of GSoC has been a hectic one, with a ton of time spent correcting errors and fixing PRs. + +Decorator PR Fixes +------------------ + +With the decorator implementation PRs in place, and the integration of my original PR for keyword-only arguments (https://github.com/dipy/dipy/pull/3239), it was now time for the PRs that apply the decorator to specific modules to be integrated. + +Lazy Loading PR +--------------- + +I have submitted a PR for the lazy loading feature using the `lazy_loader `__ package. (https://github.com/dipy/dipy/pull/3288) + +Difficulties & Challenges +------------------------- + +I have a ton of challenges ahead, as I am having trouble with some PRs. I am specifically having difficulty completing:
+I am grateful for the guidance and support provided by my mentor `Serge Koudoro `__, and the DIPY community, which have been instrumental in driving this project forward. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_12_Inigo_week_7.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_12_Inigo_week_7.rst.txt new file mode 100644 index 0000000..7d4f1d5 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_12_Inigo_week_7.rst.txt @@ -0,0 +1,36 @@ +Week 7 into GSoC 2024: Starting to see the light at the end of the VAE +====================================================================== + +.. post:: July 12 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +Finally, I figured out how to solve the ``nan`` value problem in the VAE training. As I suspected, the values that the ``ReparametrizationTrickSampling`` layer was getting were too big for the exponential operations. I use the exponential operation because I am treating the Encoder output as the log variance of the latent space distribution, and for sampling we need the standard deviation. We use the log variance instead of the standard deviation for avoiding computing logarithms. + +The solution consisted of two measures: first, clipping the values of the exponential operations to the ``[-1e10, 1e10]`` range; second, adding batch normalization layers after each convolution in the Encoder enforces a 0 mean and unit variance, preventing large values at the output. I did not use batch normalization in the fully connected layers that output the mean and the log variance of the latent distribution because I did not want to constrain the latent space too much with distributions with the mentioned characteristics. These layers should capture other characteristics of the data distribution. + +You can see a preliminary result of training the Variational AutoEncoder below, with the exact same training parameters as the AutoEncoder, but only for 50 epochs for proving the concept. + +.. image:: /_static/images/gsoc/2024/inigo/inigo_preliminary_vae_result_fibercup.png + :alt: Preliminary reconstruction result after training the VAE for 50 epochs with the FiberCup dataset. + :width: 600 + +I should mention that the variance of the 0-mean distribution from which ``epsilon`` is sampled in the ``ReparametrizationTrickSampling`` layer can be adjusted as a hyperparameter. I set it to 1 for now, but I could explore this hyperparameter in the future, but first I need to learn about the theoretical implications of modifying it. + +On the other hand, I started a discussion in a `PR `_ that includes all these changes to make it easier for my mentors and my GSoC colleagues to review my code. This change was suggested by my mentors to make my advances more public and accessible to feedback. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next week I will train the VAE for longer (~120 epochs) and I will also explore annealing the KL loss, which is a technique that consists of increasing the KL loss weight during the first epochs of training and then decreasing it. This is done to avoid the model getting stuck in a local minimum of the loss function. 
+ +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +I got a bit stuck thinking how to solve the exploding loss problem, and despite the described advances may seem small, they required a lot of thought and debugging. Thankfully my lab colleague `Jorge `_ helped me with his machine learning expertise and gave me the idea of batch normalization. + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_15_kaustav_week_7.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_15_kaustav_week_7.rst.txt new file mode 100644 index 0000000..10c7a17 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_15_kaustav_week_7.rst.txt @@ -0,0 +1,113 @@ +My Journey Continues: Week 7 Progress with DIPY +=============================================== + +.. post:: July 15 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Greetings, everyone! +The seventh week of GSOC has been a fruitful and learning one. + +Decorator PR Fixes +------------------ + +This week I tried to fix one of the decorator PR(:ref:`PR #3257 <#3257>`). I had implemented a lot of solutions to make the tests pass, but realized I had added unnecessary code, all I had to do was fix the tests. +Basically I had to add the necessary calls for keyword arguments. + +Lazy Loading PR +--------------- + +I had submitted a PR for the lazy loading feature last week(:ref:`PR #3288 <#3288>`). I was able to fix most of the problems that I was facing last week, I added a ton of **.pyi** files for various modules. +I had initiated the review of the PR this week. I hope to get it merged ASAP, maybe in a day or two. Alas this was my biggest learning until now and also quite fun, and I am really thankful for the opportunity to do this work. + +Difficulties & Challenges +------------------------- + +While fixing the PRs, these were some of my thoughts + +.. _#3257: + +1. `PR #3257 `_ +- Most of the issues were fixed thanks to the review by my mentor. One clarification still remains about the **fit_method**. I was receiving an error as mentioned below, to fix it I applied an `else` clause for the `fit_method` in `FreeWaterTensorModel`. + +.. code-block:: python + + def test_fwdti_singlevoxel(): + # Simulation when water contamination is added + gtf = 0.44444 # ground truth volume fraction + mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) + S_conta, peaks = multi_tensor( + gtab_2s, + mevals, + S0=100, + angles=[(90, 0), (90, 0)], + fractions=[(1 - gtf) * 100, gtf * 100], + snr=None, + ) + fwdm = fwdti.FreeWaterTensorModel(gtab_2s, "WLS") + fwefit = fwdm.fit(S_conta) + + dipy/reconst/tests/test_fwdti.py:100: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + dipy/reconst/multi_voxel.py:40: in new_fit + return single_voxel_fit(self, data) + dipy/testing/decorators.py:210: in wrapper + return convert_positional_to_keyword(func, args, kwargs) + dipy/testing/decorators.py:191: in convert_positional_to_keyword + return func(*args, **kwargs) + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + data = array([ 100. 
, 43.50442139, 13.18018944, 33.49286891, + 18.75861633, 22.62819702, 37.1749802 ,... 5.72778443, + 29.20758655, 5.17472072, 35.21098605, 27.02812095, + 20.99614616, 5.7284468 ]), mask = None, kwargs = {}, S0 = 100.0 + + @multi_voxel_fit + @warning_for_keywords() + def fit(self, data, *, mask=None, **kwargs): + """Fit method of the free water elimination DTI model class + + Parameters + ---------- + data : array + The measured signal from one voxel. + mask : array + A boolean array used to mark the coordinates in the data that + should be analyzed that has the shape data.shape[:-1] + """ + S0 = np.mean(data[self.gtab.b0s_mask]) + fwdti_params = self.fit_method( + self.design_matrix, data, S0, *self.args, **self.kwargs + ) + E TypeError: 'str' object is not callable + + dipy/reconst/fwdti.py:161: TypeError + +.. _#3254: + +1. `PR #3254 `_ +- I am still having issues with this PR, where applying the decorator to two specific functions gives me errors. I am assuming the problem is occurring because my decorator is not receiving the signature correctly from the cython function. I was able to resolve it locally through the `cython.binding` method but it didn't work out for the CI. + +.. _#3288: + +3. `PR #3288 `_ +- Very good progress, I have a few little doubts to complete this PR. I will try to ask them in the next meeting session with my mentor. Also for some reason the external libraries are not getting loaded even with the correct implementation of the `lazy_loader `__ package. Hopefully I can resolve this soon. + +Next Week +--------- + +Next week I am planning to take care of some documentation issues that are raised in the repository like `Issue #2665 `__. Also I have some tasks assigned by my mentor towards improvements of DIPY. +I have started working on them already, planning to put down the PRs soon. + +Final Thoughts +-------------- + +The seventh week of the Coding phase has been a progressive one. I learned tons of new stuff and implemented it. Hopefully I can finish the PRs soon and move onto next tasks. +I am grateful for the guidance and support provided by my mentor `Serge Koudoro `__, and the DIPY community, which have been instrumental in driving this project forward. + +Stay tuned for more updates as I continue to work on enhancing DIPY! + +Thank you for reading! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_19_Inigo_week_8.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_19_Inigo_week_8.rst.txt new file mode 100644 index 0000000..b512125 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_19_Inigo_week_8.rst.txt @@ -0,0 +1,37 @@ +Week 8 into GSoC 2024: Further advances with the VAE model +========================================================== + +.. post:: July 19 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week I continued training the VAE model with the FiberCup dataset, this time for 120 epochs, and the results are promising. The model is able to reconstruct the input data with a decent level of detail. + +.. image:: /_static/images/gsoc/2024/inigo/vanilla_vae_120_epoch_results.png + :alt: Vanilla Variational AutoEncoder reconstruction results after 120 epochs of training. + :width: 600 + +I also looked at the theoretical and technical implications of implementing the `beta-VAE architecture `_ for my experiments, which could help in disentangling the latent space representation of the streamlines according to features learnt in an unsupervised manner. 
+ +Shortly, applying a weight (bigger than 1) to the KL loss component of the VAE loss function encourages the model to learn a version of the latent space where features that can be perceived in the data space are aligned with the latent space dimensions. This way, one can modulate the generative process according to the learnt 'perceivable' features, once they are identified and located in the latent space. + +However, increasing the beta weight compromises the reconstruction quality, which is what basically makes streamlines look reasonable. Finding a good beta weight is as 'simple' as running a hyperparameter search while constraining the parameter to be higher than one, and to try to prioritize the MSE (Mean Squared Error, reconstruction loss) in the search algorithm. + +From the technical side implementing a beta-VAE is very straightforward, by just adding the beta weight in the loss equation and storing the parameter for traceability, so this did not take a lot of time. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next week I wanted to tinker around a bit with this parameter to see how it affects the quality of the reconstructions and the organization of the latent space, but I don't think this is an effective strategy, nor the priority. Thus, I will start implementing the conditional VAE, which will allow me to generate new streamlines by conditioning the latent space with a specific continuous variable. +This is a bit more complex than the vanilla VAE, but I think I will be able to implement it on time because the main components are already there and I just need to add the conditioning part, based on this `paper `_. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +This week I haven't got stuck in any particular issue, because I was mainly focused on training the model and understanding the beta-VAE architecture. + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_22_kaustav_week_8.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_22_kaustav_week_8.rst.txt new file mode 100644 index 0000000..5283f00 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_22_kaustav_week_8.rst.txt @@ -0,0 +1,86 @@ +My Journey Continues: Week 8 Progress with DIPY +=============================================== + +.. post:: July 22, 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone! Time for another week of progress. This week has been particularly productive as I tackled several important issues in the dipy.org project and implemented an enhancement suggested by my mentor. Let me walk you through the details of my work. + +Issues in dipy.org +------------------ + +1. Blog Post migration (https://github.com/dipy/dipy.org/issues/11) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +One of the major tasks I undertook this week was migrating blog posts from various old sources to our current website. This task was crucial for consolidating our content and ensuring all valuable information is accessible on our main platform. I worked with three different sources: a Blogspot site dedicated to GSoC 2015, a personal blog, and a Google Docs document. + +The migration process was quite involved. I had to carefully convert each post from its original format to RST (reStructuredText), ensuring that all formatting, links, and images were correctly transferred. 
In total, I migrated 20 blog posts, each requiring individual attention to preserve its unique content and structure. + +A significant part of this task involved adding and organizing images associated with these posts. I created a new directory structure in our `/_static/images/gsoc/` folder to house these images, making sure they were correctly linked in the new RST files. + +To ensure the quality of the migration, I conducted thorough testing. This involved verifying that all 20 migrated posts rendered correctly on our development server, checking both internal and external links for functionality, and confirming that all images displayed properly in their new locations. + +One challenge I encountered was dealing with complex formatting, particularly in code blocks. These required manual adjustments to ensure they displayed correctly in the RST format. Additionally, I made sure to preserve all author information in the RST metadata, maintaining proper attribution for each post. + +Lastly, I updated the old blog URLs to redirect to their new locations on our site. + +2. Image Organization (https://github.com/dipy/dipy.org/issues/50) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Upon reviewing our image organization, I noticed that it was rather chaotic and in need of improvement. To address this, I proposed and implemented a new organizational structure for our images. The new system organizes images by year and contributor, following this pattern: + +.. code-block:: text + + _static/images/gsoc/[year]/[name_of_contributor] + +It involved moving numerous images from their original locations in the main folder to their new, more specific locations based on the year and contributor. This wasn't just a matter of moving files; each move necessitated updating the corresponding image links in our blog posts and other content. + +The benefits of this new structure are significant. It allows for easier management of our growing image collection, makes it simpler to find specific images when needed, and provides a clear history of contributions over the years. Moreover, it sets a clear standard for future image uploads, ensuring our organization remains consistent moving forward. + +3. GSoC wiki posts migration (https://github.com/dipy/dipy.org/issues/21) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Another important task I tackled this week was migrating our GSoC (Google Summer of Code) and GSOD (Google Season of Docs) announcements from the DIPY wiki to our main blog. This migration is part of our ongoing efforts to centralize our content and make important announcements more accessible to our community. + +The process involved carefully converting the wiki content into blog post format, ensuring that all the information was accurately transferred. I paid special attention to maintaining the original context and importance of these announcements while adapting them to our blog's style and format. + +After creating these new blog posts, I conducted thorough testing to verify that they render correctly on our development server. This included checking the formatting, ensuring all links are functional, and confirming that any associated images or embedded content displays properly. + +This migration will make our GSoC and GSOD announcements more visible and easily accessible to potential participants and the wider DIPY community, potentially increasing engagement with these programs. + +Enhancement +----------- + +1. 
Adding sidebar to blog page (https://github.com/dipy/dipy.org/pull/53) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In addition to addressing existing issues, I also worked on an enhancement suggested by my mentor: adding a sidebar to our `blog.html` page. Previously, our blog page lacked a sidebar, which made navigation less intuitive and user-friendly. + +The new sidebar is a significant improvement to our blog's usability. It provides quick access to important navigational elements such as categories, archives, and tags. This addition allows users to easily browse through our blog content, find related posts, or explore posts from specific time periods. + +This enhancement greatly improves the overall user experience of our blog, making it easier for readers to discover and engage with our content. It's a small change that has a big impact on the usability of our site. + +Difficulties & Challenges +------------------------- + +I didn't particularly find anything difficult, the tasks were time consuming but not trouble causing. + +Next Week +--------- + +Looking ahead to next week, I have several objectives in mind: + +1. I plan to continue addressing open issues on the `dipy.org` & `dipy` repository. There are still several tasks that need attention, and I'm eager to keep up the momentum we've built. I have yet to look into which issues I will be tackling next week. + +2. For the preparation of next task, I have to look into Docker. I am planning to learn it through next week to be well prepared for the next task which is about github actions. + +Final Thoughts +-------------- + +The eighth week of the Coding phase has been highly productive, allowing me to address several existing issues and implement a valuable enhancement. It's satisfying to see these improvements take shape and know that they're contributing to a better experience for our users and contributors. + +I continue to be grateful for the guidance and support provided by my mentor, `Serge Koudoro `__, and the entire DIPY community. Their insights and feedback have been instrumental in shaping my work and driving this project forward. + +Stay tuned for more updates as I continue to work on enhancing DIPY! Thank you for reading, and I look forward to sharing more progress next week. \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_26_Inigo_week_9.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_26_Inigo_week_9.rst.txt new file mode 100644 index 0000000..d3038f2 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_26_Inigo_week_9.rst.txt @@ -0,0 +1,61 @@ +Week 9 into GSoC 2024: The Conditional VAE implementation +========================================================= + +.. post:: July 26 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week was a bit shorter than usual because Thursday was a holiday in the Basque Country, and today we had an outdoor activity with my lab mates (we went kayaking to the Urdaibai Biosphere Reserve). Nevertheless, it was full of advances and interesting scientific matters. + +As I mentioned in my last blog post, this week I worked on the implementation of the conditional VAE based on `this implementation `_ . + + +As a refresher, the main idea behind conditional generative models is being able to generate new data with some desired properties or parameters. 
To achieve this, it is common to organize the latent representation in a way that allows locating the regions in which the desired properties are found. + +For example, imagine our VAE learned a latent representation of images of cats with different fur lengths. If we do not condition our latent space on the fur length, our model might not learn about this distinctive feature found in the data space, and cats with drastically different fur lengths may be closely clustered together in the latent space. But with conditioning, we can tell the model to cluster the images along a "fur-length" dimension, so if we sample 2 images from a line that varies along that dimension but in opposite sides, we get a cat with very short fur, and another one, with very long fur. This results in a generative process that can be tuned on demand! + +However, there are many methods to condition a Variational AutoEncoder, and they usually depend on the type of variable we want to condition on, so the methods for categoric variables (cat vs. dog, bundle_1_fiber vs. bundle_2_fiber, etc.) and continuous ones (age of the person, length of a streamline) are normally not applicable to both types. + +In the case of the FiberCup dataset, I chose to condition the latent space on the length of the streamlines, which is a continuous variable and it is a fairly easy thing to learn from the morphology of the streamlines. + +After implementing the conditional VAE as in the provided reference and training it for 64 epochs (early stopped due to lack of improvement in the MSE) I did not get a decent reconstruction, but the latent space seems to be organized differently compared to the vanilla VAE, which suggests that the conditioning is doing something (good or not, we will see...). + +.. image:: /_static/images/gsoc/2024/inigo/cvae_first_reconstruction_result.png + :alt: First reconstruction of the training data of the conditional VAE (cVAE). + :width: 600 + +On the other hand, note that the FiberCup has 7 distinct bundles, and both latent spaces show (when 2D-projected with the t-SNE algorithm) 7 clusters, suggesting that the network does know about the different bundles. Samples/streamlines are colored according to their length, and even if the bundle to which they belong is not plotted, we know that each cluster is formed by streamlines of different bundles because each bundle has a distinctive length. + +Note that the t-SNE representation may have aligned other dimensions more meaningful to the algorithm that do not necessarily include the conditioning variable (streamline length). Maybe the authors of the code I based mine were lucky to get the alignment, or smart enough to manipulate the conditioning variable (normalization, Z-scoring, etc.) to make t-SNE grab this information and put it along an axis in their plots. + +.. image:: /_static/images/gsoc/2024/inigo/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png + :alt: t-SNE projections of the latent space (only plausible fibers) of the conditional VAE and the vanilla VAE. + :width: 600 + + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After discussing with my mentors, we decided to take two steps: + +1. Validate reliably whether the conditioning is working or not. For this, there are two strategies: + - Checking that the predicted conditioning variable matches the input. In other words, measuring the MSE between the "true age" VS the "predicted age", their correlation, and plotting one against the other. 
This way you know how good the encoder is capturing the conditioning variable variation in the training data. The authors of the paper I based my implementation on do it like this: + + .. image:: /_static/images/gsoc/2024/inigo/conditioning_validation_using_mse.png + :alt: Scatter plot of true vs predicted conditioning variable for validation. + :width: 600 + + - Visual checking fiber generation for specific bundles. Knowing that different bundles have different fiber lengths, we try to generate fibers of specific length, and see whether the generated fibers belong to the desired bundle (no matter if they are plausible or implausible). Having length as the conditioning variable allows us to perform this trick, what would not be so intuitive to check if we had used Fractional Anisotropy or other DTI-derived metrics, as these are not visually as intuitive as length. + +2. To try out an adversarial framework, which is 1) easier to implement 2) easier to understand, and 3) likely to also work (we'll see if better or not). The idea is to have a discriminator that tries to predict the conditioning variable from the latent space, and the encoder tries to fool the discriminator. This way, the encoder learns to encode the conditioning variable in the latent space, and the discriminator learns to predict it. This is a very common approach in GANs, and it is called "Conditional GAN" (cGAN). As a result, we would have what I would call a conditional adversarial VAE (CA-VAE). You can read more about adversarial VAEs `in this work `_ or `in this one `_. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +Luckily this week I also did not get stuck in any problem, but I am a bit worried about the quality of the reconstructions of the cVAE. I hope that the adversarial framework will help with this. + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_07_29_kaustav_week_9.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_07_29_kaustav_week_9.rst.txt new file mode 100644 index 0000000..bdcc9d8 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_07_29_kaustav_week_9.rst.txt @@ -0,0 +1,65 @@ +My Journey Continues: Week 9 Progress with DIPY +=============================================== + +.. post:: July 29, 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone! It’s time for another update on my progress. Week 9 has been a blend of learning, preparation, and a bit of personal work as I continue my journey with the dipy.org project. This week, I focused on diving into Docker, an essential tool for the upcoming tasks in our project. Let me take you through what I accomplished and learned over the past few days. + +Learning Docker +--------------- + +One of the key goals for this week was to get up to speed with Docker, as it will play a significant role in the next phase of our project. I’ve heard a lot about Docker’s versatility and how it simplifies the process of managing development environments, so I was eager to dive in. + +**Getting Started with Docker** + +I started my Docker journey by exploring the official `Docker documentation `_, which provided a solid foundation. The tutorials and examples there were incredibly helpful in understanding the basics of Docker, including concepts like containers, images, Dockerfiles, and volumes. The step-by-step guide walked me through setting up my first container, which was a satisfying experience. 
+ +**Trial and Error** + +While the documentation gave me a good start, much of my learning came through trial and error. I experimented with creating and running containers, exploring how to build custom Docker images, and understanding how to manage multiple containers simultaneously. + +I also explored some video tutorials on YouTube. + +1. `Docker Tutorial for Beginners `_ +2. `Learn Docker in 1 Hour | Full Docker Course for Beginners `_ + +For reference, I was pointed to look at the pyAFQ docker as well. (https://github.com/yeatmanlab/pyAFQ) + +**Docker in the Context of DIPY** + +The reason for learning Docker was for creation of DIPY docker image that can be utilized easily for running DIPY and doing tests. + +PhD Applications +---------------- + +On the personal front, this week also marked the beginning of my PhD application process. Balancing the demands of my work with DIPY and the preparation required for my applications has been challenging, but I’m hanging on. I’ve started drafting my statement of purpose and gathering the necessary documents. + +The process of applying for a PhD is quite intense. I’m excited about this next step in my academic journey, but it does mean I’m a bit busier than usual. Nevertheless, I’m committed to continuing my work with DIPY and ensuring that I stay on track with my GSoC project. + +Difficulties & Challenges +------------------------- + +This week’s challenges were more about balancing my time and managing multiple learning curves. Docker, while immensely powerful, has a steep learning curve, especially when you’re new to containerization. However, I found that breaking down the learning process into smaller, manageable tasks helped me make steady progress. + +The other challenge, of course, was balancing my PhD application work with my commitments to DIPY. It’s a delicate balance, but I’m confident that with careful time management, I’ll be able to handle both effectively. + +Next Week +--------- + +Looking ahead to next week, I plan to: + +1. **Practice with Docker and Create a DIPY Docker Image**: I’ll continue refining my Docker skills and aim to create a Docker image specifically for DIPY. This will involve ensuring all dependencies are correctly configured and the image can be used effectively within our development workflow. + +2. **Fix Tutorials for the Keyword Argument PR**: In addition to working with Docker, I’ll also be fixing the tutorials related to the keyword argument PR that I implemented earlier. This will involve updating the documentation and ensuring the tutorials reflect the latest changes in the codebase. + +Final Thoughts +-------------- + +Week 9 has been a productive blend of learning and preparation, setting the stage for some exciting developments in the coming weeks. I’m particularly looking forward to seeing how Docker can streamline our workflows in DIPY, and I’m equally excited about the progress I’m making on my PhD applications. + +As always, I’m grateful for the support and guidance from my mentor, `Serge Koudoro `__, and the DIPY community. Their feedback and encouragement continue to be invaluable as I navigate both the technical challenges of this project and the personal milestones in my academic journey. + +Stay tuned for more updates as I continue to dive deeper into Docker and tackle the next set of tasks in DIPY! Thank you for reading, and I look forward to sharing more progress next week. 
diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_02_Inigo_week_10.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_02_Inigo_week_10.rst.txt new file mode 100644 index 0000000..c5ff299 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_02_Inigo_week_10.rst.txt @@ -0,0 +1,65 @@ +Week 10 into GSoC 2024: Validating the conditional VAE results +============================================================== + +.. post:: August 02 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +During this week I focused on validating the results of the conditional VAE (cVAE) that I implemented and experimented with last week. + +I further trained the model and got a result with lower label loss, indicating that theoretically the model was capturing the attribute variability (streamline length, in this case) in the training data. Our model was able to "understand" how to pick up the length of the streamlines only from their morphology. + +As I mentioned last week, plotting the predicted length against the true length is a good way to validate that the model is indeed doing this correctly. In addition, I plotted the 2-dimensional t-SNE projection of latent space of the model. Knowing that each bundle of the FiberCup dataset has a characteristic and rather constant streamline length, the bundles should be clustered together. See the figure below: + +.. image:: /_static/images/gsoc/2024/inigo/vae_conditioning_validation.png + :alt: (Left) Predicted length vs True length of streamlines in the training data of the cVAE; (Right) Latent space t-SNE projection of the plausible fibers in the training dataset of the cVAE. + :width: 600 + +We can see that yes, the streamlines are grouped into "worm" clusters. Again, there are 7 bundles and 7 worms, and each worm has a constant length, indicating that yes, each bundle is represented by a worm. + +Now that we have validated that the model is capturing the attribute variability, we can move on to the next step: generating fibers of specific lengths and checking if they belong to the desired bundle. This is a more intuitive way to validate the model, as we can visually check if the model is generating fibers of the desired length and bundle. + +To do this, I generated streamlines spanning the whole length range of the dataset and checked if they belonged to the correct bundle, but the results were not great, because the morphology of the generated streamlines was the same every time, regardless of the indicated length. To check the results better, I generated 300 streamlines of the minimum and maximum lengths found in the training dataset, but the morphology problem persisted. In the figure below you can see that the shorter streamlines (length = 30 units) are morphologically very similar to the ones in the right (length = 300 units). + +.. image:: /_static/images/gsoc/2024/inigo/streamlines_short_long.png + :alt: Bundles of generated streamlines with length 30 (left) and 300 (right). + :width: 600 + +In our weekly meeting we discussed this problem and we argued that this could be due to the architecture of the model, which is trying to predict a whole latent vector ``z`` from a single number ``r``, the attribute to be conditioned on. Find below a diagram of the model architecture for clarification: + +.. image:: /_static/images/gsoc/2024/inigo/conditional_vae_architecture_diagram.png + :alt: Architecture of the implemented conditional VAE. 
+ :width: 600 + +As you see in the diagram, the ``D3`` and ``D4`` blocks of the model are trying to predict the attribute prior represented by the ``r`` variable. The label loss is defined as the mean squared error between this ``r`` and the true attribute of the data (streamline length in this case), so when ``D3`` and ``D4`` pick up the attribute, the model should be able to generate streamlines of the desired length, which is what actually happens. + +However, the generator (yellow block) tries to generate two parameters that are compared to the ones in the green block. This means that when we try to generate a streamline of a specific length by running a specific ``r`` value through the generator and then through the decoder, the model is only able to generate a fixed morphology, regardless of the length. This is because the generator is trying to generate a whole latent vector ``z`` from a single number ``r``. + +After a thorough discussion, we decided to try a non-variational adversarial framework to solve this problem due to the following reasons: + +- The adversarial nature of the architecture implicitly introduces a prior to the data, so regularization with variational methods is not necessary, which makes the architecture and the loss computation of the model simpler. + +- It is easier to understand, because the original authors of the implemented conditional VAE did not provide a clear derivation of the loss function, so my understanding of its underlying mechanisms is not as deep as I would need to tune its behavior effectively. All in all, the adversarial framework is way more intuitive (at least for me). + +- It is widespread and there are many resources out there to understand it and implement it. What is more, I quickly found several implementations of adversarial AutoEncoders in TensorFlow with a basic search in Google. I need to read through them and pick the one that suits me best. + +- There are certainly ways to condition the network on categorical and continuous variables, which would be really convenient for conditioning both on the bundle and on the attribute of the data. Currently it is not possible with the cVAE implementation, as it only conditions on the attribute. This would provide greater control when sampling from the latent space. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Having said this, this week I will focus on learning about adversarial AutoEncoders and implementing one. I will also investigate how to introduce the conditioning into the adversarial framework, and how to condition on both categorical and continuous variables. + +For now I found that conditioning on the continuous attribute can be done `like in this work `_ that I mentioned in my GSoC application, and on the categorical one like in the `original adversarial AutoEncoders paper `_. + + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +As I said in a past blog post, research work may not be as straightforward as one would like, but I also would not say that I got stuck. I am just facing some challenges, and this is a journey to complete slowly but surely, because science does not need "fast food" solutions, but "slow cooked", well-thought, and well-tested ones. + +Thank you for reading, until next week!
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_05_kaustav_week_10.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_05_kaustav_week_10.rst.txt new file mode 100644 index 0000000..69590b8 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_05_kaustav_week_10.rst.txt @@ -0,0 +1,41 @@ +My Journey Continues: Week 10 Progress with DIPY +================================================ + +.. post:: August 5, 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone! Week 10 has been a challenging one, with a lot happening both in my personal life and with the DIPY project. Unfortunately, I wasn’t able to make as much progress as I had hoped, but I still managed to get some important work done. Let me walk you through what I accomplished this week. + +Fixing Tutorials for the Keyword Argument PR +-------------------------------------------- + +This week, my main focus was on fixing the tutorials related to the keyword argument PR that I had worked on earlier. Ensuring that all tutorials are up-to-date and accurately reflect the changes in our codebase is crucial for maintaining the quality and usability of our documentation. + +I spent time reviewing the tutorials, identifying areas where the new keyword arguments were relevant, and updating the examples accordingly. + +Keyword PR: https://github.com/dipy/dipy/pull/3306 + +Busy Week: PhD Applications and Moving Houses +--------------------------------------------- + +On the personal front, this week has been incredibly hectic. I’ve been deep into the process of applying for PhD programs, which has taken up a significant amount of my time and energy. Drafting statements, gathering documents, and trying to get letters of recommendation have all been time-consuming, but I’m making good progress on my applications. + +Additionally, I had to move houses this week, which added another layer of complexity to an already busy schedule. The logistics of packing, moving, and settling into a new place were more exhausting than I anticipated, leaving me with less time and focus for my work on DIPY. + +Next Week +--------- + +Looking ahead to next week, I plan to: + +1. **Resume Docker Practice and Create a DIPY Docker Image**: With the move and PhD applications mostly behind me, I’m aiming to get back on track with Docker. My goal is to continue practicing with Docker and work on creating a Docker image specifically for DIPY, ensuring all dependencies are correctly configured for our development workflow. + +Final Thoughts +-------------- + +Week 10 has been a slower week in terms of progress, but sometimes life throws unexpected challenges our way. Despite the lower productivity, I’m glad I was able to keep moving forward, even if at a slower pace. I’m looking forward to getting back into a more productive rhythm next week as I continue my work on DIPY. + +As always, I appreciate the support and understanding of my mentor, `Serge Koudoro `__, and the DIPY community. Their encouragement is invaluable, especially during weeks like this one. + +Thank you for reading, and I look forward to sharing more updates next week as I pick up the pace again!
diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_09_Inigo_week_11.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_09_Inigo_week_11.rst.txt new file mode 100644 index 0000000..ef51dcc --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_09_Inigo_week_11.rst.txt @@ -0,0 +1,44 @@ +Week 11 into GSoC 2024: The Adversarial AutoEncoder +=================================================== + +.. post:: August 09 2024 + :author: Iñigo Tellaetxe + :tags: google + :category: gsoc + +What I did this week +~~~~~~~~~~~~~~~~~~~~ + +This week was all about learning about adversarial networks, `attribute-based latent space regularization in AutoEncoders `_, and fighting with Keras and TensorFlow to implement the adversarial framework. It was a bit (or two) challenging, but I managed to do it, thanks to a `very nice and clean implementation `_ I found, based on the original `adversarial AutoEncoders paper `_. + +I still did not implement the attribute-based regularization (AR), but once I train the adversarial AutoEncoder (AAE from now on), visualize its latent space, and check that I can generate samples from a specific class (bundles in our case), I will implement it. Hopefully, all this will go smoothly. For now, I succeeded in instantiating the model without any errors, and next week I will train it. + +Anyways, in the figure below you can see the architecture I proposed for the AAE, which should allow conditioning the data generation process on a categorical variable and a continuous attribute: + +.. image:: /_static/images/gsoc/2024/inigo/adversarial_ae_with_abr.png + :alt: Diagram of the architecture proposed to allow conditioning on categorical and continuous variables. + :width: 600 + +Let's break down how the AAE works. For those not familiar with how generative adversarial networks (GANs) work, the idea is to have two networks, a generator and a discriminator, that play a game. The generator tries to generate samples that look like the real data (e.g.: pictures of animals), while the discriminator tries to distinguish between real and generated samples. The generator is trained to fool the discriminator, and the discriminator is trained to not be fooled. This way, the generator learns to generate samples that look like the real data (e.g.: real pictures of animals). The adversarial loss (:math:`\mathcal{L}_{adv}`) is computed as shown in the lowest rectangle. + +In our case, the generator is the encoder :math:`\mathcal{G}`, which generates a latent representation of the input data, which the discriminator :math:`\mathcal{D}` tries to distinguish from "real" latent representations, sampled from a given prior distribution. The trick to condition the model on a categorical variable (e.g.: the kind of animal to which the photo belongs) is to concatenate the latent representation generated by the encoder :math:`\mathcal{G}` with the one-hot encoded animal type class. This way, the decoder can generate samples conditioned on a categorical variable. The reconstruction loss (:math:`\mathcal{L}_{MSE}`) is computed as shown in the middle rectangle, and it ensures that the samples reconstructed from the latent representation are as close as possible to the original data. + +As for the AR, we try to tie a continuous attribute of choice found in the data space (e.g.: fur length) to a specific dimension of the latent space.
To do this, we compute an attribute-distance matrix in the data space (:math:`D_a`) and a distance matrix from the chosen dimension of the latent space (:math:`D_r`). By minimizing the mean absolute error (MAE) between the two matrices, we force the latent space to be organized in such a way that the chosen dimension is related to the chosen attribute. This way, we can generate samples conditioned on the attribute of choice, e.g.: we can generate a specific category (cat) with a specific attribute (fur length). The AR loss (:math:`\mathcal{L}_{AR}`) is computed as shown in the top rectangle. Coming back to the real domain of our problem, the categorical variable would be the bundle to which the fiber belongs and the continuous attribute would be the streamline length; in the future, the latter would be the age of the tractogram. + +Lastly, I also started writing my last post for GSoC 2024, which will be a summary of the project, the results, and the future work. I will open a draft PR for continuing my work outside of the coding period because I want to keep working on this project as it is a very interesting topic in line with my PhD research. + +What is coming up next week +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next week I will: + +- Train the AAE. I will probably need to do this on a computing cluster, as my mighty laptop is not powerful enough to train the model in a reasonable time. +- Continue writing the final GSoC 2024 post. +- Open the draft PR so that it can be referenced in the final post and serve as a tangible place to publish my work. + +Did I get stuck anywhere +~~~~~~~~~~~~~~~~~~~~~~~~ + +This week I fought a lot with Keras and TensorFlow but as I had gained experience from previous "fights" I managed to not get really stuck, so I am happy to say that I also won this time! + +Until next week! \ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_12_kaustav_week_11.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_12_kaustav_week_11.rst.txt new file mode 100644 index 0000000..f4709b4 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_12_kaustav_week_11.rst.txt @@ -0,0 +1,50 @@ +My Journey Continues: Week 11 Progress with DIPY +================================================ + +.. post:: August 12, 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone! Week 11 has been another week of progress, although it came with its own set of challenges. I’ve been working on Docker, making updates to the tutorial fixes, and I also have some exciting news on the personal front. Let me take you through the highlights of this week. + +Creating a Docker Image for DIPY +-------------------------------- + +This week, I focused on creating a Docker image for DIPY. However, I ran into some issues related to the `FURY` and `VTK` packages due to version incompatibilities. These packages are crucial for the visualization components of DIPY, and ensuring they work correctly within the Docker environment has been a bit tricky. + +The version conflicts have caused some build failures, and I’ve been troubleshooting the issue by testing different configurations and dependencies. While I haven’t completely resolved the issue yet, I’m hopeful that I can find a solution soon. I’ll continue to work on this in the coming days, as getting this Docker image ready is important for our continuous integration processes. + +..
image:: /_static/images/gsoc/2024/kaustav/docker_issue_fury.png + :alt: Docker FURY VTK issue + :width: 800 + +Tutorial Fixes PR Update +------------------------ + +In addition to Docker, I’ve been updating the PR for the tutorial fixes related to the keyword argument changes I made earlier. I’m happy to report that I’m about 60% done with the updates. The tutorials are shaping up well, and the changes are making them more robust and easier to follow. + +I plan to complete the remaining 40% by today or tomorrow, ensuring that all tutorials are fully updated. + +PhD Application Submitted +-------------------------- + +On a personal note, I successfully submitted my PhD application this week! This has been a significant milestone for me, and I’m thrilled to have it completed. Applying for a PhD is a rigorous process, and I’ve put a lot of effort into crafting the application. Now that it’s submitted, I’m hopeful for the best outcome. + +If I do get accepted, it would be a dream come true. I’m particularly excited about the prospect of learning from and working with leaders in the field, such as `Dr. Eleftherios Garyfallidis `__, whose work I greatly admire. The opportunity to contribute to and learn from such a program would be invaluable, and I’m eagerly looking forward to the possibility. + +Next Week +--------- + +Looking ahead to next week, my priorities will be: + +1. **Fixing the Docker Image Issues**: I’ll continue troubleshooting the `FURY` and `VTK` package issues within the Docker environment, aiming to get a working Docker image for DIPY. + +2. **Completing the Tutorial Fixes**: I plan to finish the remaining 40% of the tutorial updates, ensuring that the PR is ready for review and merging. + +Final Thoughts +-------------- + +Week 11 has been a productive week, despite some challenges with Docker. I’m pleased with the progress I’ve made on the tutorials and excited about the future, both with DIPY and potentially with my PhD journey. As always, I’m grateful for the support from my mentor, `Serge Koudoro `__, and the DIPY community, who continue to provide invaluable guidance. + +Thank you for reading, and I look forward to sharing more updates next week as I continue working on these tasks! diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_19_kaustav_week_12.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_19_kaustav_week_12.rst.txt new file mode 100644 index 0000000..771015c --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_19_kaustav_week_12.rst.txt @@ -0,0 +1,78 @@ +My Journey Continues: Week 12 Progress with DIPY +================================================ + +.. post:: August 19, 2024 + :author: Kaustav + :tags: google + :category: gsoc + +Hello everyone! We've reached Week 12, the final week of my GSoC journey with DIPY. It's been an incredible experience, and I'm excited to share my progress and reflections from this week. + +Dockerfile Creation and Deployment +---------------------------------- + +This week, I focused on creating a Dockerfile for DIPY, along with build and push shell scripts. This task was crucial for improving our continuous integration processes and making DIPY more accessible to users who prefer containerized environments. + +The Dockerfile creation process involved carefully selecting the right base image and ensuring all necessary dependencies were included. The build script automates the Docker image creation process, while the push script facilitates easy deployment to our container registry. 
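For illustration, here is a minimal sketch of what this build-and-push automation could look like. It is not the script from the PR: the actual scripts are plain shell, and the image name ``dipy/dipy``, the ``latest`` tag, and the assumption that Docker is installed and you are logged in to the target registry are placeholders for this example; the steps are written in Python with ``subprocess`` simply to keep the snippet in the same language as the other code in this report.

.. code-block:: python

   # build_and_push.py -- illustrative sketch only; the PR ships shell scripts.
   # Assumes the Docker CLI is installed and you are logged in to the target registry.
   import subprocess

   IMAGE = "dipy/dipy"   # hypothetical image name
   TAG = "latest"        # hypothetical tag

   def build(context="."):
       """Build the Docker image from the Dockerfile found in *context*."""
       subprocess.run(["docker", "build", "-t", f"{IMAGE}:{TAG}", context], check=True)

   def push():
       """Push the freshly built image to the container registry."""
       subprocess.run(["docker", "push", f"{IMAGE}:{TAG}"], check=True)

   if __name__ == "__main__":
       build()
       push()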
+ +While working on this, I encountered some challenges related to package versions and compatibility. However, through careful testing and configuration adjustments, I was able to resolve these issues and create a functional Dockerfile. + +You can view the progress of this work here: https://github.com/dipy/dipy/pull/3322 + +Tutorial Fixes Progress +----------------------- + +I've made significant progress on fixing up the tutorials. This ongoing task involves updating and improving various DIPY tutorials to ensure they're up-to-date, clear, and fully functional. I've been working through the tutorials systematically, making necessary adjustments and improvements. + +Check out the tutorial fixes PR here: https://github.com/dipy/dipy/pull/3306 + +Addressing Missed Keyword Arguments +----------------------------------- + +In a separate PR, I addressed an important issue related to missed keyword arguments in several function calls. These weren't updated during the initial implementation of keyword arguments. The affected functions include: + +* ``get_fname`` +* ``get_sphere`` +* ``gradient_table`` + +This PR was crucial as these missed keyword arguments were not detected during the initial testing phase, potentially leading to issues in related functionalities. By adding the necessary keyword arguments to these function calls, we've ensured proper execution and improved the overall robustness of our codebase. + +You can review this PR here: https://github.com/dipy/dipy/pull/3324 + +Preparing for Project Conclusion +-------------------------------- + +As this is the final week of GSoC, I've been focusing on wrapping up my project: + +1. **Final Project Report**: I've started preparing the final project report, which will summarize all the work done during my GSoC period, the challenges faced, and the solutions implemented. + +2. **Final Presentation**: I'm also working on a presentation that will showcase the key achievements and learnings from my GSoC experience with DIPY. + +Reflections and Gratitude +------------------------- + +As I near the end of this incredible journey, I'm filled with gratitude for the DIPY community, my amazing mentor `Serge Koudoro `_, and everyone who has supported me throughout this process. The guidance and support I've received have been invaluable, helping me grow as a developer and contributor to open-source software. + +I am really grateful to my fellow GSoC teammates: + +* `Iñigo Tellaetxe Elorriaga `_ +* `Robin Roy `_ +* `Wachiou BOURAIMA `_ + +Their amazing support has been crucial throughout this journey. + +This experience has not only improved my technical skills but also taught me the importance of community collaboration in open-source projects. I'm truly thankful for the opportunity to have been part of this amazing team. + +Next Steps +---------- + +In the coming days, my priorities will be: + +1. Finalizing and submitting the project report + +Final Thoughts +-------------- + +As I reflect on my GSoC journey with DIPY, I'm amazed at how much I've learned and grown. This experience has reinforced my passion for neuroimaging and open-source development. I'm excited to see how my contributions will help future DIPY users and developers. + +Thank you all for your support throughout this journey. I look forward to sharing my final presentation and continuing to be part of the DIPY community! 
\ No newline at end of file diff --git a/dipy.org/pull/66/_sources/posts/2024/2024_08_21_kaustav_final_report.rst.txt b/dipy.org/pull/66/_sources/posts/2024/2024_08_21_kaustav_final_report.rst.txt new file mode 100644 index 0000000..9d0a376 --- /dev/null +++ b/dipy.org/pull/66/_sources/posts/2024/2024_08_21_kaustav_final_report.rst.txt @@ -0,0 +1,464 @@ +.. role:: custom-title + +.. |main-title| raw:: html + + Google Summer of Code Final Work Product + +.. |project-abstract-title| raw:: html + + Project Abstract + +.. |proposed-objectives-title| raw:: html + + Proposed Objectives + +.. |objectives-completed-title| raw:: html + + Objectives Completed + +.. |objectives-progress-title| raw:: html + + Objectives in Progress + +.. |future-work-title| raw:: html + + Future Works + +.. |weekly-title| raw:: html + + GSoC Weekly + +.. |timeline-title| raw:: html + + Timeline + +.. image:: /_static/images/logos/gsoc-logo.png + :height: 40 + :target: https://summerofcode.withgoogle.com/programs/2024/projects/dHajBmW3 + :class: no-background + +.. image:: /_static/images/logos/python-logo.png + :height: 40 + :target: https://summerofcode.withgoogle.com/programs/2024/organizations/python-software-foundation + :class: no-background + +.. image:: /_static/images/logos/dipy-logo-2.png + :height: 30 + :target: http://dipy.org + :class: no-background + +.. raw:: html + +
+ +|main-title| +============ + +.. post:: August 21 2024 + :author: Kaustav + :tags: google + :category: gsoc + +- **Name:** `Kaustav Deka `_ +- **Organization:** `Python Software Foundation `_ +- **Sub-Organization:** `DIPY `_ +- **Project:** `Modernize DIPY Codebase `_ + +|project-abstract-title| +------------------------ + +This project is dedicated to implementing crucial improvements to DIPY, with the primary goal of enhancing its functionality, efficiency, and user experience through a series of targeted updates and additions. + +This initiative comprehensively covers several critical development areas, including transitioning to keyword-only arguments to boost code robustness and readability, and implementing lazy loading to optimize resource management and overall performance. + +The project also aims to improve and simplify the current website management system, enhancing user interaction and information accessibility. Furthermore, it focuses on streamlining the Issues and Pull Requests triage process, incorporating automation to increase efficiency. The integration of multiple GitHub Actions is planned to further streamline workflows. Additionally, the project involves refactoring select DIPY packages and improving docstring documentation, ensuring clearer and more maintainable code. Lastly, new tutorials will be added to support user education and engagement, making DIPY more accessible to both new and experienced users. + +|proposed-objectives-title| +--------------------------- + +The objectives include: + +1. **Transitioning to Keyword-Only Arguments**: Restructuring function calls to utilize keyword-only arguments for improved code clarity and robustness. + +.. raw:: html + +
+ +2. **Lazy Loading Integration**: Implementing lazy loading techniques to load resources dynamically, thereby optimizing performance and reducing memory footprint. + +.. raw:: html + +
+ +3. **Improvement and Simplification of Website Management**: Enhancing the management of the DIPY website to streamline content updates and provide a more user-friendly experience for visitors. + +.. raw:: html + +
+ +4. **Integration of Multiple GitHub Actions**: Implementing a suite of GitHub Actions to simplify development workflows, automate repetitive tasks, and enhance collaboration among contributors. + +.. raw:: html + +
+ +5. **Improvement of Issues and Pull Requests Triage + Triage Automation**: Enhancing the process of managing issues and pull requests by implementing efficient triage practices and automation tools to streamline the review and resolution process. + +.. raw:: html + +
+ +6. **Refactoring of DIPY Packages + Improvement of Docstrings**: Conducting targeted refactoring of DIPY packages to improve code structure, readability, and maintainability. Additionally, enhancing docstrings to provide comprehensive documentation for developers and users. + +.. raw:: html + +
+ +7. **Addition of Tutorials**: Creating new tutorials to guide users through various aspects of DIPY functionality, ensuring accessibility for users of all experience levels and facilitating learning and adoption. + +.. raw:: html + +
+ +|objectives-completed-title| +---------------------------- + +1. Keyword-Only Arguments +------------------------- + +This task focused on transitioning DIPY's codebase to use keyword-only arguments in function calls. Keyword arguments are a powerful feature in Python that allow for more explicit and self-documenting function calls. They offer several advantages: + +i. **Improved readability:** By using keyword arguments, it's immediately clear what each argument represents, making the code easier to understand. +ii. **Reduced errors:** Keyword arguments reduce the risk of passing arguments in the wrong order. +iii. **Better API design:** They allow for more flexible function signatures and make it easier to add optional parameters without breaking existing code. + +In the future, we want all function calls in DIPY to use keyword arguments instead of positional arguments. To facilitate this transition, I created a decorator that provides warnings when functions are called with positional arguments instead of keyword arguments. + +Here's how the decorator works: + +.. code-block:: python + + @warning_for_keyword() + def function(a, b, *, c=100): + pass + + variable = function(a, b, 50) # This will trigger a warning + +In this example, the decorator will issue a warning saying that ``c`` is being called as a positional argument when it should be a keyword argument. + +For the transition phase, we're providing warnings instead of completely blocking the function execution. This approach allows for a smoother migration, giving developers time to update their code while still maintaining functionality. + +The implementation of this decorator was a significant part of the project. You can find the full pull request for the decorator here: `PR #3239 `_ + +After creating the decorator, we applied it systematically across various DIPY modules. This extensive work touched many parts of the codebase: + +- **align** `PR #3249 `_: Applied to alignment algorithms. +- **core** `PR #3251 `_: Updated core functionality of DIPY. +- **data** `PR #3253 `_: Modified data handling functions. +- **denoise** `PR #3252 `_: Updated denoising algorithms. +- **io** `PR #3255 `_: Applied to input/output operations. +- **nn** `PR #3256 `_: Modified neural network related functions. +- **segment** `PR #3258 `_: Updated segmentation algorithms. +- **sims** `PR #3259 `_: Applied to simulation functions. +- **tracking** `PR #3260 `_: Modified tracking algorithms. +- **utils** `PR #3261 `_: Updated utility functions. +- **viz** `PR #3262 `_: Applied to visualization functions. +- **workflows** `PR #3263 `_: Modified workflow-related functions. + +This comprehensive application of the decorator across DIPY's modules represents a significant step towards modernizing the codebase. It not only improves the immediate readability and robustness of the code but also sets the stage for future enhancements and maintains consistency across the project. + +The warnings generated by this decorator will help both DIPY developers and users transition to using keyword arguments, ultimately leading to more maintainable and error-resistant code. As the community adapts to this change, we expect to see a gradual reduction in the use of positional arguments, paving the way for potentially making keyword arguments mandatory in future releases. + +2. Website Improvements +----------------------- + +Several tasks were undertaken to improve the DIPY website, enhancing its functionality, organization, and user experience. 
These improvements are crucial for maintaining an effective online presence and ensuring that our community has easy access to all relevant information. + +a. Blog Post Migration +^^^^^^^^^^^^^^^^^^^^^^ + +One of the major tasks was the migration of blog posts from various old sources to our current website. This consolidation effort ensures that all valuable information is accessible on our main platform. The migration involved working with three different sources: + +- A Blogspot site dedicated to GSoC 2015 +- A personal blog +- A Google Docs document + +This task was crucial for several reasons: + +- It centralizes our content, making it easier for users to locate information. +- It ensures that valuable historical content is not lost. +- It provides a unified platform for all DIPY-related blog posts. + +For more details, see the related issue: `Issue #11 `_ + +b. Image Organization Improvement +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Upon reviewing our image organization, it became apparent that the existing system was chaotic and in need of improvement. To address this, a new organizational structure for our images was proposed and implemented. The new system organizes images by year and contributor, which offers several benefits: + +- Easier navigation and retrieval of images +- Better tracking of image contributions over time +- Improved ability to manage and update image assets + +This reorganization sets the stage for more efficient image management in the future. + +For more information, refer to: `Issue #50 `_ + +c. DIPY Wiki Migration +^^^^^^^^^^^^^^^^^^^^^^ + +Another significant task was migrating our GSoC (Google Summer of Code) and GSOD (Google Season of Docs) announcements from the DIPY wiki to our main blog. This migration is part of our ongoing efforts to centralize content and make important announcements more accessible to our community. Benefits of this migration include: + +- Increased visibility of GSoC and GSOD announcements +- Improved searchability of this content +- Consistency in how we present important project information + +You can find more details in: `Issue #21 `_ + +d. Adding Sidebars to Blog Page +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +An enhancement suggested by the mentor was implemented: adding a sidebar to our ``blog.html`` page. Previously, the blog page lacked a sidebar, which made navigation less intuitive and user-friendly. The new sidebar significantly improves the blog's usability by providing: + +- Quick access to important navigational elements +- Categories for easy content classification +- Archives for exploring historical posts +- Tags for topic-based navigation + +These additions allow users to easily browse through our blog content, find related posts, or explore posts from specific time periods. The sidebar enhances the overall user experience and makes our blog more engaging and accessible. + +The pull request for this feature can be found here: `PR #53 `_ + +Conclusion +^^^^^^^^^^ + +These website improvements represent a significant step forward in enhancing DIPY's online presence. By consolidating content, improving organization, and enhancing navigation, we've made it easier for our community to engage with DIPY's resources and stay informed about project developments. These changes lay a solid foundation for future improvements and will contribute to the growth and accessibility of the DIPY project. + +|objectives-progress-title| +--------------------------- + +1. 
Keyword-Only Arguments +------------------------- + +Implementation of keyword-only arguments is still in progress for two modules: + +a. Direction module (`PR #3254 `_) +b. Reconst module (`PR #3257 `_) + +Next Steps +^^^^^^^^^^ + +- Conduct testing and address feedback +- Merge changes once approved + +2. Lazy Loading +--------------- + +Lazy loading is a crucial optimization technique that has been implemented in DIPY to enhance resource management and improve performance, particularly for large-scale neuroimaging applications. By delaying the loading of modules until they are actually needed, we achieve a more efficient and responsive system. + +Implementation Details +^^^^^^^^^^^^^^^^^^^^^^ + +- **Package Used:** The ``lazy_loader`` package has been employed to implement lazy loading functionality in DIPY, allowing for on-demand module loading. +- **Inspiration:** This approach draws inspiration from the scikit-image library, which has effectively utilized lazy loading to optimize its performance and reduce memory usage. + +The core concept involves creating a stub file, such as `__init__.pyi`, where all functions from the module are imported. This stub file is then imported into the main initialization file, `__init__.py`, enabling lazy loading for the specified submodules. + +Here’s an outline of how it works: + +.. code-block:: python + :caption: __init__.pyi + + submodules = [ + "align", + "core", + "data", + "denoise", + "direction", + "io", + "nn", + "reconst", + "segment", + "sims", + "stats", + "tracking", + "utils", + "viz", + "workflows", + "tests", + "testing", + ] + + __all__ = submodules + +In the main initialization file, lazy loading is attached to the module using the following setup: + +.. code-block:: python + :caption: __init__.py + + import lazy_loader as lazy # provides attach_stub + + __getattr__, __lazy_dir__, _ = lazy.attach_stub(__name__, __file__) + +This structure ensures that each submodule is only loaded when it is explicitly accessed, thereby optimizing both memory usage and load times. + +Benefits of Lazy Loading +^^^^^^^^^^^^^^^^^^^^^^^^ + +- **Reduced Initial Load Time:** By loading modules and objects only when they are needed, the initial load time of DIPY is significantly decreased, leading to a faster startup. +- **Memory Optimization:** On-demand loading minimizes unnecessary memory usage, which is particularly advantageous when working with large datasets in neuroimaging. +- **Improved Performance:** Lazy loading can lead to better overall performance, especially in scenarios where only specific subsets of DIPY’s functionality are used. +- **Enhanced Modularity:** This approach encourages a more modular code structure, improving maintainability and simplifying the addition of new features in the future. + +Current Status +^^^^^^^^^^^^^^ + +- The implementation of lazy loading across all planned modules has been completed. +- The changes are currently under review by the DIPY team, with final input and feedback pending. +- The updated modules are expected to be merged into the main codebase shortly after the review process is complete. + +Next Steps +^^^^^^^^^^ + +- **Incorporate Feedback:** Address any feedback or suggestions provided by the DIPY team during the review. +- **Final Testing:** Conduct thorough testing to ensure that all lazy-loaded components function as expected under various conditions. +- **Documentation Update:** Update the project documentation to reflect the new lazy loading behavior, providing clear guidance on how it impacts module usage.
+- **Merge and Monitor:** Once approved, merge the changes into the main DIPY codebase and monitor initial user feedback and performance metrics post-deployment. + +This implementation of lazy loading represents a significant step forward in optimizing DIPY's performance and usability, particularly in resource-intensive environments. + +3. Docker Integration +--------------------- + +Current Status +^^^^^^^^^^^^^^ + +The Docker integration for DIPY is currently under development. A pull request has been submitted to introduce Docker support, aimed at enhancing the consistency and ease of development, testing, and deployment across different environments: + +- **Pull Request:** `PR #3322 `_ + +To achieve a minimal yet functional configuration, I created the following Dockerfile: + +.. code-block:: dockerfile + :caption: Dockerfile + + FROM python:3.10 + + RUN apt-get update && apt-get install -y --no-install-recommends \ + libhdf5-dev \ + gcc \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + + RUN pip install --no-cache-dir \ + numpy==1.26.4 \ + dipy \ + nibabel \ + scipy \ + matplotlib + +This Dockerfile sets up a Python 3.10 environment with the essential dependencies for DIPY, including `numpy`, `dipy`, `nibabel`, `scipy`, and `matplotlib`. The use of `--no-install-recommends` and `--no-cache-dir` ensures a lean and efficient image by minimizing unnecessary packages and caching. + +Additionally, I developed build and push scripts to automate the Docker image creation and deployment process. These scripts streamline the workflow, making it easier for developers to build and push images to a Docker registry with minimal effort. + +Next Steps +^^^^^^^^^^ + +1. **Incorporate Feedback**: Address any suggestions or concerns raised during the code review process. +2. **Cross-Environment Testing**: Test the Docker setup across various environments to ensure compatibility and stability. +3. **Documentation Update**: Revise the project documentation to include comprehensive guidelines on using Docker with DIPY, including setup, usage, and troubleshooting tips. +4. **CI/CD Integration**: Integrate Docker into the Continuous Integration/Continuous Deployment (CI/CD) pipeline to automate builds, tests, and deployments, ensuring a robust and consistent development workflow. + +The implementation of Docker is a critical step in modernizing DIPY's development process, providing a standardized environment that reduces setup time and ensures consistency across different platforms. We expect to finalize and merge this feature soon, following thorough testing and review. + +|future-work-title| +------------------- + +1. Addition of Tutorials +------------------------ + +As part of our ongoing efforts to improve DIPY's accessibility and user experience, I am planning to develop new tutorials. These tutorials will focus on two key areas: + +a. 2D Registration +^^^^^^^^^^^^^^^^^^ + +The planned 2D registration tutorial will cover: + +- Overview of 2D registration algorithms in DIPY +- Step-by-step guide for performing 2D registration +- Practical examples and use cases +- Tips for optimizing registration results + +b. Docker Usage +^^^^^^^^^^^^^^^ + +The Docker tutorial will include: + +- Introduction to Docker and its benefits for DIPY users +- Setting up Docker for use with DIPY +- Running DIPY in a Docker container +- Best practices for using DIPY with Docker +- Troubleshooting common Docker-related issues + +Objectives +^^^^^^^^^^ + +By adding these tutorials, we aim to: + +1. 
Lower the entry barrier for new DIPY users. +2. Provide clear, practical guidance on using DIPY's features. +3. Encourage adoption of best practices in neuroimaging analysis. +4. Facilitate reproducible research through consistent environments. + +|weekly-title| +-------------- + +- My blog posts can be found on the DIPY website. + +|timeline-title| +---------------- + +.. list-table:: Weekly Journey + :header-rows: 1 + :widths: 20 60 15 + + * - Date + - Title + - Blog Link + * - 27-05-2024 + - My Journey Begins: Community Bonding Period with DIPY + - `Week 0 `__ + * - 03-06-2024 + - Starting with keyword-only-decorator function + - `Week 1 `__ + * - 12-06-2024 + - Decorator function refinement, Lazy loading research + - `Week 2 `__ + * - 19-06-2024 + - Decorator implementation across modules, Lazy loading demo + - `Week 3 `__ + * - 30-06-2024 + - Decorator implementation fixes + - `Week 4 `__ + * - 01-07-2024 + - Health issues, Lazy loading preparation + - `Week 5 `__ + * - 07-07-2024 + - Lazy loading implementation, Decorator bug fixes + - `Week 6 `__ + * - 15-07-2024 + - Improvements in lazy loading + - `Week 7 `__ + * - 22-07-2024 + - Blog-post migration, GSoC wiki migration + - `Week 8 `__ + * - 29-07-2024 + - Docker research + - `Week 9 `__ + * - 05-08-2024 + - Fixing tutorials for application of keyword-only-decorator + - `Week 10 `__ + * - 12-08-2024 + - Docker PR + - `Week 11 `__ + * - 19-08-2024 + - Final week + - `Week 12 `__ diff --git a/dipy.org/pull/66/_static/basic.css b/dipy.org/pull/66/_static/basic.css new file mode 100644 index 0000000..2af6139 --- /dev/null +++ b/dipy.org/pull/66/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 270px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 
8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + 
+dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} 
+ +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/common/calendar.css b/dipy.org/pull/66/_static/css/common/calendar.css new file mode 100644 index 0000000..2979c81 --- /dev/null +++ b/dipy.org/pull/66/_static/css/common/calendar.css @@ -0,0 +1,4 @@ +.calendar { + width: 100%; + min-height: 400px; +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/common/override.css b/dipy.org/pull/66/_static/css/common/override.css new file mode 100644 index 0000000..d413be1 --- /dev/null +++ b/dipy.org/pull/66/_static/css/common/override.css @@ -0,0 +1,16 @@ +/* +This CSS file represents all the style changes to +override the pydata-theme and sphinx. +*/ + +.navbar-brand.logo { + padding: 1.1rem 1.1rem 1.1rem 0; +} + +.no-background { + background: transparent !important; +} + +.gsoc-title { + color: #fd8d25 !important; +} diff --git a/dipy.org/pull/66/_static/css/common/variables.css b/dipy.org/pull/66/_static/css/common/variables.css new file mode 100644 index 0000000..8ce117b --- /dev/null +++ b/dipy.org/pull/66/_static/css/common/variables.css @@ -0,0 +1,15 @@ +html[data-theme="light"] { + --gst-color-primary: #FD8D25; + --gst-color-secondary: #1B8BF4; + --gst-color-bg: var(--bs-white); + --gst-color-bg-gray: var(--bs-gray-100); + --gst-color-light-border: var(--gst-color-border); +} + +html[data-theme="dark"] { + --gst-color-primary: #FD8D25; + --gst-color-secondary: #1B8BF4; + --gst-color-bg: var(--bs-gray-900); + --gst-color-bg-gray: var(--bs-gray-900); + --gst-color-light-border: transparent; +} diff --git a/dipy.org/pull/66/_static/css/dipy.css b/dipy.org/pull/66/_static/css/dipy.css new file mode 100644 index 0000000..307819f --- /dev/null +++ b/dipy.org/pull/66/_static/css/dipy.css @@ -0,0 +1,13 @@ +/* Index CSS file */ +/* Do not add any css directly here.... 
*/ + +/* util css is no longer required as it is fetched from grg-sphinx-theme */ + +/* Common CSS */ + +@import url("./common/variables.css"); +@import url("./common/override.css"); +@import url("./common/calendar.css"); + +/* Home CSS */ +@import "./home/index.css"; \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/carousel.css b/dipy.org/pull/66/_static/css/home/carousel.css new file mode 100644 index 0000000..f281cd2 --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/carousel.css @@ -0,0 +1,92 @@ +.carousel-item { + height: 440px; +} + +.carousel-item img { + border: 1px solid var(--gst-color-border); +} + +.carousel-indicators [data-bs-target] { + background-color: var(--gst-color-primary); +} + + +.carousel-caption { + opacity: 0.9; + width: fit-content; + bottom: auto; + top: 40px; + left: 0; + background-color: var(--gst-color-primary); + box-shadow: var(--gst-shadow); +} + +.carousel-caption::after { + position: absolute; + content: ""; + height: 100%; + clip-path: polygon(0 0, 100% 0, 100% 100%, 0 100%, 35% 50%); + background-color: var(--gst-color-primary); + width: 60px; + transform: scaleX(-1); + right: -40px; + top: 0; +} + +.carousel-caption::before { + position: absolute; + left: 0; + content: ""; + width: 20px; + height: 100%; + background-image: linear-gradient(45deg, transparent 50%, var(--gst-color-primary-border) 50%); + background-size: 20px 20px; + background-repeat: no-repeat; + background-position: bottom right; + z-index: -1; +} + + +.carousel-caption h5 { + color: var(--bs-white); + font-size: 24px; + font-weight: 900; + margin: 0; +} + +.carousel-caption a, +.carousel-caption a:visited { + color: var(--bs-white); +} + +@media only screen and (max-width: 768px) { + .carousel-item { + height: 260px; + } + + .carousel-caption { + opacity: 0.9; + bottom: 70px; + padding: 10px 20px !important; + top: auto; + width: 80%; + left: 0; + right: 0; + margin: 0 auto; + border-radius: 12px; + background-color: var(--gst-color-primary); + box-shadow: none; + } + + .carousel-caption::after { + content: none; + } + + .carousel-caption::before { + content: none; + } + + .carousel-caption h5 { + font-size: 18px; + } +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/cite.css b/dipy.org/pull/66/_static/css/home/cite.css new file mode 100644 index 0000000..db1bde7 --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/cite.css @@ -0,0 +1,59 @@ +.cite { + display: grid; + grid-template-columns: 100px 1fr; + gap: 1em; + border: 2px solid var(--gst-color-light-border); + background-color: var(--gst-color-bg); + border-radius: 12px; + width: fit-content; + margin: 0 auto; +} + +.cite-icon-bg { + height: 90px; + width: 90px; + border-radius: 50%; + + background-color: var(--gst-color-primary); + color: var(--bs-white); + font-size: 52px; + margin: 0 auto; + + display: flex; + justify-content: center; + align-items: center; + + box-shadow: var(--gst-shadow); +} + + +.cite-detail { + display: flex; + flex-direction: column; + align-items: start; + justify-content: center; +} + +.cite-detail p { + margin: 0; + margin-bottom: 0; + font-size: 18px; +} + +.cite-link, +.cite-link:visited { + color: var(--gst-color-primary); + text-decoration: underline; + font-weight: 600; +} + +@media only screen and (max-width: 768px) { + .cite { + grid-template-columns: 1fr; + } + + .cite-detail { + flex-direction: column; + align-items: start; + } +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/explore.css 
b/dipy.org/pull/66/_static/css/home/explore.css new file mode 100644 index 0000000..5272d6b --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/explore.css @@ -0,0 +1,44 @@ +.explore { + display: grid; + gap: 3em; +} + +.explore-tile { + border-radius: 12px; + background-color: var(--gst-color-bg-gray); + border: 1px solid var(--gst-color-border); + transition: all 0.2s ease-in-out; +} + +.explore-tile:hover, +.explore-tile:hover .explore-tile-icon { + box-shadow: var(--gst-shadow); +} + +.explore-tile-icon { + background-color: var(--gst-color-primary); + height: 55px; + width: 55px; + border-radius: 12px; + /* TODO This should be removed as the util will be available */ + padding: 12px; + margin-bottom: 24px; + transition: all 0.2s ease-in-out; +} + +.explore-tile-icon img { + filter: invert(1) !important; +} + +.explore-tile-title { + font-weight: 600; + color: var(--gst-color-section-heading); +} + +.explore-tile-know-more, +.explore-tile-know-more:visited { + color: var(--gst-color-primary); + text-transform: uppercase; + font-size: 14px; + font-weight: 600; +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/index.css b/dipy.org/pull/66/_static/css/home/index.css new file mode 100644 index 0000000..19eae6e --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/index.css @@ -0,0 +1,41 @@ +@import "./intro.css"; +@import "./explore.css"; +@import "./carousel.css"; +@import "./sponsors.css"; +@import "./cite.css"; + +/* CSS which is common across the home page */ +.dipy-highlight { + color: var(--pst-color-primary); +} + +div.section:nth-child(n+3) { + margin: 72px 0; + padding: 0 20px; +} + +h2.section-title { + padding: 10px 0; + margin: 10px 0; + text-align: center; + font-weight: 900; + font-size: 50px; + color: var(--gst-color-heading); +} + +a:link, a:visited, a:active { + color: inherit; + text-decoration: inherit; +} + +a:hover { + text-decoration: underline; + color: #1b8bf4; +} + +@media only screen and (max-width: 768px) { + /* Your CSS styles for phones go here */ + h2.section-title { + font-size: 30px; + } +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/intro.css b/dipy.org/pull/66/_static/css/home/intro.css new file mode 100644 index 0000000..3bfc86c --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/intro.css @@ -0,0 +1,38 @@ +.intro { + width: 100%; + padding: 96px 32px; + text-align: center; +} + +.tagline { + color: var(--gst-color-heading); + font-size: 76px; + line-height: 1.25; + font-weight: 900; + margin: 0 auto; +} + +.description { + line-height: 1.25; + font-size: 20px; + max-width: 900px; + margin: 24px auto 40px; + color: var(--pst-color-text-muted); +} + +.actions { + margin: 0 auto; +} + +@media only screen and (max-width: 768px) { + /* Your CSS styles for phones go here */ + .intro { + padding: 90px 20px; + } + .tagline { + font-size: 40px; + } + .description { + font-size: 16px; + } +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/css/home/sponsors.css b/dipy.org/pull/66/_static/css/home/sponsors.css new file mode 100644 index 0000000..1c84b30 --- /dev/null +++ b/dipy.org/pull/66/_static/css/home/sponsors.css @@ -0,0 +1,24 @@ +.sponsors { + gap: 5px; +} +.sponsors-item { + background-color: var(--gst-color-bg-gray); + border-radius: 2px; + display: flex; + justify-content: space-around; + align-items: center; + padding: 20px; + transition: all 0.2s ease-in-out; +} + +html[data-theme=dark] .sponsors-item:hover { + background-color: var(--bs-gray-100); +} + 
+html[data-theme=dark] .sponsors-item:hover img:not(.only-dark):not(.dark-light) { + filter: none; +} + +html[data-theme=dark] .sponsors-item img:not(.only-dark):not(.dark-light) { + filter: grayscale(1) invert(1); +} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/dipy-favicon.png b/dipy.org/pull/66/_static/dipy-favicon.png new file mode 100644 index 0000000..caffe0d Binary files /dev/null and b/dipy.org/pull/66/_static/dipy-favicon.png differ diff --git a/dipy.org/pull/66/_static/dipy-logo.png b/dipy.org/pull/66/_static/dipy-logo.png new file mode 100644 index 0000000..d9bac5e Binary files /dev/null and b/dipy.org/pull/66/_static/dipy-logo.png differ diff --git a/dipy.org/pull/66/_static/doctools.js b/dipy.org/pull/66/_static/doctools.js new file mode 100644 index 0000000..4d67807 --- /dev/null +++ b/dipy.org/pull/66/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/dipy.org/pull/66/_static/documentation_options.js b/dipy.org/pull/66/_static/documentation_options.js new file mode 100644 index 0000000..7e4c114 --- /dev/null +++ b/dipy.org/pull/66/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/dipy.org/pull/66/_static/file.png b/dipy.org/pull/66/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/dipy.org/pull/66/_static/file.png differ diff --git a/dipy.org/pull/66/_static/images/banner/dipy_workshop_2024.png b/dipy.org/pull/66/_static/images/banner/dipy_workshop_2024.png new file mode 100644 
index 0000000..ef38e0d Binary files /dev/null and b/dipy.org/pull/66/_static/images/banner/dipy_workshop_2024.png differ diff --git a/dipy.org/pull/66/_static/images/colorfa.png b/dipy.org/pull/66/_static/images/colorfa.png new file mode 100644 index 0000000..7603564 Binary files /dev/null and b/dipy.org/pull/66/_static/images/colorfa.png differ diff --git a/dipy.org/pull/66/_static/images/dipy-banner.png b/dipy.org/pull/66/_static/images/dipy-banner.png new file mode 100644 index 0000000..e7a170e Binary files /dev/null and b/dipy.org/pull/66/_static/images/dipy-banner.png differ diff --git a/dipy.org/pull/66/_static/images/dipy-logo.png b/dipy.org/pull/66/_static/images/dipy-logo.png new file mode 100644 index 0000000..d9bac5e Binary files /dev/null and b/dipy.org/pull/66/_static/images/dipy-logo.png differ diff --git a/dipy.org/pull/66/_static/images/dipy-ws-header.png b/dipy.org/pull/66/_static/images/dipy-ws-header.png new file mode 100644 index 0000000..615b846 Binary files /dev/null and b/dipy.org/pull/66/_static/images/dipy-ws-header.png differ diff --git a/dipy.org/pull/66/_static/images/dipy_paper_logo.jpg b/dipy.org/pull/66/_static/images/dipy_paper_logo.jpg new file mode 100644 index 0000000..c923cdc Binary files /dev/null and b/dipy.org/pull/66/_static/images/dipy_paper_logo.jpg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/AWF_v1.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/AWF_v1.png new file mode 100644 index 0000000..4a2c96a Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/AWF_v1.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Code_ODF.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Code_ODF.png new file mode 100644 index 0000000..cc51326 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Code_ODF.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/DSC03995.JPG b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/DSC03995.JPG new file mode 100644 index 0000000..4659633 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/DSC03995.JPG differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI.png new file mode 100644 index 0000000..95e8987 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_A.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_A.png new file mode 100644 index 0000000..58e4479 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_A.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_B.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_B.png new file mode 100644 index 0000000..8adb158 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Diffusion_tensor_measures_from_DTI_and_DKI_B.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F0.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F0.png new file mode 100644 index 0000000..0334a05 Binary files /dev/null and 
b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F0.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F1.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F1.png new file mode 100644 index 0000000..0ef30f3 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F1.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F2.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F2.png new file mode 100644 index 0000000..c00e488 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F2.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F3.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F3.png new file mode 100644 index 0000000..48d02fc Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F3.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F4.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F4.png new file mode 100644 index 0000000..8160970 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F4.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F5.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F5.png new file mode 100644 index 0000000..3029d9e Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/F5.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure1_Midterm.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure1_Midterm.png new file mode 100644 index 0000000..ec9e6fb Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure1_Midterm.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure2_midterm.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure2_midterm.png new file mode 100644 index 0000000..53c15ab Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Figure2_midterm.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures.png new file mode 100644 index 0000000..16b00b1 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_A.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_A.png new file mode 100644 index 0000000..dfe5866 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_A.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_B.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_B.png new file mode 100644 index 0000000..c18b25d Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_B.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_final_post.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_final_post.png new file mode 100644 index 0000000..9a3fd55 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Kurtosis_tensor_standard_measures_final_post.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_comparison.png 
b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_comparison.png new file mode 100644 index 0000000..7f66caf Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_comparison.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_compared_to_MK_nm_zoom.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_compared_to_MK_nm_zoom.png new file mode 100644 index 0000000..9373213 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_compared_to_MK_nm_zoom.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_resolved.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_resolved.png new file mode 100644 index 0000000..8d477a4 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_sigularities_resolved.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_singularities_compared_to_MK_nm.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_singularities_compared_to_MK_nm.png new file mode 100644 index 0000000..ecc26bc Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/MK_singularities_compared_to_MK_nm.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/ODF_final_code.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/ODF_final_code.png new file mode 100644 index 0000000..edf1451 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/ODF_final_code.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture1.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture1.png new file mode 100644 index 0000000..26ea220 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture1.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2.png new file mode 100644 index 0000000..4f774c2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2A.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2A.png new file mode 100644 index 0000000..bf1f9f4 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture2A.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture3.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture3.png new file mode 100644 index 0000000..89eb0f9 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture3.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture4.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture4.png new file mode 100644 index 0000000..a559000 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture4.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture5.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture5.png new file mode 100644 index 0000000..d77ec58 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture5.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture6.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture6.png new file mode 100644 index 0000000..eeae18b Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture6.png differ diff --git 
a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture7.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture7.png new file mode 100644 index 0000000..0713e61 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture7.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture8.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture8.png new file mode 100644 index 0000000..6f8f86f Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Picture8.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/RK_numerical_noise_free.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/RK_numerical_noise_free.png new file mode 100644 index 0000000..c6c0772 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/RK_numerical_noise_free.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step1.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step1.png new file mode 100644 index 0000000..e6ae0c1 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step1.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step2.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step2.png new file mode 100644 index 0000000..f4f743a Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Step2.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Vij_equation.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Vij_equation.png new file mode 100644 index 0000000..c795e7c Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/Vij_equation.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/corpuscallosum_axial.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/corpuscallosum_axial.png new file mode 100644 index 0000000..8876213 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/corpuscallosum_axial.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0.png new file mode 100644 index 0000000..681314b Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0_piesno.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0_piesno.png new file mode 100644 index 0000000..f7c8c9b Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b0_piesno.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b2000.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b2000.png new file mode 100644 index 0000000..807f219 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/denoise_b2000.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_dirs_final_post.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_dirs_final_post.png new file mode 100644 index 0000000..ee4bf52 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_dirs_final_post.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_geometries.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_geometries.png new file mode 100644 index 0000000..b10ddb3 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_geometries.png differ diff --git 
a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_odfs_final_post.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_odfs_final_post.png new file mode 100644 index 0000000..ad421f1 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_odfs_final_post.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_peaks.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_peaks.png new file mode 100644 index 0000000..6738859 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/dki_peaks.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/eigenvalues.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/eigenvalues.png new file mode 100644 index 0000000..c869c0a Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/eigenvalues.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/gODF_equa.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/gODF_equa.png new file mode 100644 index 0000000..daa3140 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/gODF_equa.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/geometry_of_dki_tensors.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/geometry_of_dki_tensors.png new file mode 100644 index 0000000..18f074b Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/geometry_of_dki_tensors.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_DKI.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_DKI.png new file mode 100644 index 0000000..4705f45 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_DKI.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_mask.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_mask.png new file mode 100644 index 0000000..c86f1af Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/piesno_mask.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2015/rafael/tensor_ellipsoids.png b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/tensor_ellipsoids.png new file mode 100644 index 0000000..1b4f2b4 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2015/rafael/tensor_ellipsoids.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy1.png b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy1.png new file mode 100644 index 0000000..4620546 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy1.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy10.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy10.jpeg new file mode 100644 index 0000000..8a7a8c2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy10.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy11.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy11.jpeg new file mode 100644 index 0000000..de09be2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy11.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy12.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy12.jpeg new file mode 100644 index 0000000..f2be1f0 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy12.jpeg differ diff --git 
a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy13.png b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy13.png new file mode 100644 index 0000000..f798d0f Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy13.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy14.jpg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy14.jpg new file mode 100644 index 0000000..9388dc8 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy14.jpg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy2.png b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy2.png new file mode 100644 index 0000000..f989f0a Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy2.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy3.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy3.jpeg new file mode 100644 index 0000000..2485390 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy3.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy4.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy4.jpeg new file mode 100644 index 0000000..0d4377f Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy4.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy5.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy5.jpeg new file mode 100644 index 0000000..d9cd65d Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy5.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy6.png b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy6.png new file mode 100644 index 0000000..5e931a9 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy6.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy7.png b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy7.png new file mode 100644 index 0000000..003b0ea Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy7.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy8.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy8.jpeg new file mode 100644 index 0000000..ca67184 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy8.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy9.jpeg b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy9.jpeg new file mode 100644 index 0000000..6d66087 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2016/bishakh/dipy9.jpeg differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/shilpi/formula_.png b/dipy.org/pull/66/_static/images/gsoc/2023/shilpi/formula_.png new file mode 100644 index 0000000..3851b15 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/shilpi/formula_.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-112epoch.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-112epoch.png new file mode 100644 index 0000000..1ef4ee3 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-112epoch.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-DDIM300-108epoch.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-DDIM300-108epoch.png new file mode 100644 index 
0000000..ad73269 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/DM-MNIST-DDIM300-108epoch.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-B8-DM500.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-B8-DM500.png new file mode 100644 index 0000000..b20b8e0 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-B8-DM500.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-training-curves.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-training-curves.png new file mode 100644 index 0000000..60a23bb Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-monai-training-curves.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-reconst-D200-D300.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-reconst-D200-D300.png new file mode 100644 index 0000000..5026964 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-reconst-D200-D300.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-training-curves.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-training-curves.png new file mode 100644 index 0000000..e090f39 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/dm3d-training-curves.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vq-vae-results.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vq-vae-results.png new file mode 100644 index 0000000..a57b574 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vq-vae-results.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-f3-higher-epochs.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-f3-higher-epochs.png new file mode 100644 index 0000000..6535dc5 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-f3-higher-epochs.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-CC.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-CC.png new file mode 100644 index 0000000..43ad5ea Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-CC.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-both.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-both.png new file mode 100644 index 0000000..28e2f63 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-monai-B12-both.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-reconstructions-comparison.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-reconstructions-comparison.png new file mode 100644 index 0000000..62822c5 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae-reconstructions-comparison.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10-ICNR.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10-ICNR.png new file mode 100644 index 0000000..e4f41ab Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10-ICNR.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10.png new file mode 100644 index 0000000..0f5b4ea Binary files /dev/null and 
b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B10.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B5.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B5.png new file mode 100644 index 0000000..c194da2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-B5.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training-plots.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training-plots.png new file mode 100644 index 0000000..020d298 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training-plots.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training.png new file mode 100644 index 0000000..d9c5701 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-monai-training.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f2.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f2.png new file mode 100644 index 0000000..27ed0e2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f2.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f3.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f3.png new file mode 100644 index 0000000..2c90387 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-reconst-f3.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-training-curves.png b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-training-curves.png new file mode 100644 index 0000000..0f117a0 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2023/vara/vqvae3d-training-curves.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/adversarial_ae_with_abr.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/adversarial_ae_with_abr.png new file mode 100644 index 0000000..7068394 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/adversarial_ae_with_abr.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditional_vae_architecture_diagram.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditional_vae_architecture_diagram.png new file mode 100644 index 0000000..133c9bc Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditional_vae_architecture_diagram.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditioning_validation_using_mse.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditioning_validation_using_mse.png new file mode 100644 index 0000000..06caa76 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/conditioning_validation_using_mse.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/cvae_first_reconstruction_result.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/cvae_first_reconstruction_result.png new file mode 100644 index 0000000..caf138e Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/cvae_first_reconstruction_result.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_better_results.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_better_results.png new file mode 100644 index 
0000000..046f56f Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_better_results.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_preliminary_results.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_preliminary_results.png new file mode 100644 index 0000000..704b995 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_preliminary_results.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_replicated.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_replicated.png new file mode 100644 index 0000000..347eb01 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/fibercup_replicated.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_preliminary_vae_result_fibercup.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_preliminary_vae_result_fibercup.png new file mode 100644 index 0000000..02547e4 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_preliminary_vae_result_fibercup.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_vanilla_autoencoder.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_vanilla_autoencoder.png new file mode 100644 index 0000000..5c8b50f Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_vanilla_autoencoder.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_variational_autoencoder.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_variational_autoencoder.png new file mode 100644 index 0000000..9b2c066 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/inigo_variational_autoencoder.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png new file mode 100644 index 0000000..e2691d9 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/latent_space_comparison_VAE_cVAE_colored_by_streamline_length.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/streamlines_short_long.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/streamlines_short_long.png new file mode 100644 index 0000000..d16b5aa Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/streamlines_short_long.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vae_conditioning_validation.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vae_conditioning_validation.png new file mode 100644 index 0000000..44e8436 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vae_conditioning_validation.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vanilla_vae_120_epoch_results.png b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vanilla_vae_120_epoch_results.png new file mode 100644 index 0000000..ef0cf1e Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/inigo/vanilla_vae_120_epoch_results.png differ diff --git a/dipy.org/pull/66/_static/images/gsoc/2024/kaustav/docker_issue_fury.png b/dipy.org/pull/66/_static/images/gsoc/2024/kaustav/docker_issue_fury.png new file mode 100644 index 0000000..3111cb0 Binary files /dev/null and b/dipy.org/pull/66/_static/images/gsoc/2024/kaustav/docker_issue_fury.png 
differ diff --git a/dipy.org/pull/66/_static/images/hbm2015_exhibitors.jpg b/dipy.org/pull/66/_static/images/hbm2015_exhibitors.jpg new file mode 100644 index 0000000..7e1c5f1 Binary files /dev/null and b/dipy.org/pull/66/_static/images/hbm2015_exhibitors.jpg differ diff --git a/dipy.org/pull/66/_static/images/icons/chart.png b/dipy.org/pull/66/_static/images/icons/chart.png new file mode 100644 index 0000000..9fd259c Binary files /dev/null and b/dipy.org/pull/66/_static/images/icons/chart.png differ diff --git a/dipy.org/pull/66/_static/images/icons/cli.svg b/dipy.org/pull/66/_static/images/icons/cli.svg new file mode 100644 index 0000000..a082ad9 --- /dev/null +++ b/dipy.org/pull/66/_static/images/icons/cli.svg @@ -0,0 +1 @@ +Created by Braja Omar Justico from the Noun Project \ No newline at end of file diff --git a/dipy.org/pull/66/_static/images/icons/denoise.png b/dipy.org/pull/66/_static/images/icons/denoise.png new file mode 100644 index 0000000..3878205 Binary files /dev/null and b/dipy.org/pull/66/_static/images/icons/denoise.png differ diff --git a/dipy.org/pull/66/_static/images/icons/preprocessing.png b/dipy.org/pull/66/_static/images/icons/preprocessing.png new file mode 100644 index 0000000..fdc91e2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/icons/preprocessing.png differ diff --git a/dipy.org/pull/66/_static/images/icons/reconstruction.svg b/dipy.org/pull/66/_static/images/icons/reconstruction.svg new file mode 100644 index 0000000..92d5cdc --- /dev/null +++ b/dipy.org/pull/66/_static/images/icons/reconstruction.svg @@ -0,0 +1 @@ +Created by Flowicon from the Noun Project \ No newline at end of file diff --git a/dipy.org/pull/66/_static/images/icons/registration.svg b/dipy.org/pull/66/_static/images/icons/registration.svg new file mode 100644 index 0000000..8579166 --- /dev/null +++ b/dipy.org/pull/66/_static/images/icons/registration.svg @@ -0,0 +1 @@ +Created by Zulfa Mahendra from the Noun Project \ No newline at end of file diff --git a/dipy.org/pull/66/_static/images/icons/stat.svg b/dipy.org/pull/66/_static/images/icons/stat.svg new file mode 100644 index 0000000..b7a23fa --- /dev/null +++ b/dipy.org/pull/66/_static/images/icons/stat.svg @@ -0,0 +1 @@ +Artboard 38 Created by Anggara Putra from the Noun Project \ No newline at end of file diff --git a/dipy.org/pull/66/_static/images/icons/tractography.svg b/dipy.org/pull/66/_static/images/icons/tractography.svg new file mode 100644 index 0000000..8ce0f6c --- /dev/null +++ b/dipy.org/pull/66/_static/images/icons/tractography.svg @@ -0,0 +1,5 @@ +Created by Kartika Sholehatin from the Noun Project \ No newline at end of file diff --git a/dipy.org/pull/66/_static/images/logos/dipy-favicon.png b/dipy.org/pull/66/_static/images/logos/dipy-favicon.png new file mode 100644 index 0000000..caffe0d Binary files /dev/null and b/dipy.org/pull/66/_static/images/logos/dipy-favicon.png differ diff --git a/dipy.org/pull/66/_static/images/logos/dipy-logo-2.png b/dipy.org/pull/66/_static/images/logos/dipy-logo-2.png new file mode 100644 index 0000000..4668353 Binary files /dev/null and b/dipy.org/pull/66/_static/images/logos/dipy-logo-2.png differ diff --git a/dipy.org/pull/66/_static/images/logos/dipy-logo.png b/dipy.org/pull/66/_static/images/logos/dipy-logo.png new file mode 100644 index 0000000..d9bac5e Binary files /dev/null and b/dipy.org/pull/66/_static/images/logos/dipy-logo.png differ diff --git a/dipy.org/pull/66/_static/images/logos/gsoc-logo.png b/dipy.org/pull/66/_static/images/logos/gsoc-logo.png new file mode
100644 index 0000000..635b90d Binary files /dev/null and b/dipy.org/pull/66/_static/images/logos/gsoc-logo.png differ diff --git a/dipy.org/pull/66/_static/images/logos/python-logo.png b/dipy.org/pull/66/_static/images/logos/python-logo.png new file mode 100644 index 0000000..2bc7ed2 Binary files /dev/null and b/dipy.org/pull/66/_static/images/logos/python-logo.png differ diff --git a/dipy.org/pull/66/_static/images/pretty_tracks.png b/dipy.org/pull/66/_static/images/pretty_tracks.png new file mode 100644 index 0000000..c424d3b Binary files /dev/null and b/dipy.org/pull/66/_static/images/pretty_tracks.png differ diff --git a/dipy.org/pull/66/_static/images/simplified_tractography.png b/dipy.org/pull/66/_static/images/simplified_tractography.png new file mode 100644 index 0000000..ffef19d Binary files /dev/null and b/dipy.org/pull/66/_static/images/simplified_tractography.png differ diff --git a/dipy.org/pull/66/_static/images/sponsors/NIH_NIBIB.png b/dipy.org/pull/66/_static/images/sponsors/NIH_NIBIB.png new file mode 100644 index 0000000..6ebddde Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/NIH_NIBIB.png differ diff --git a/dipy.org/pull/66/_static/images/sponsors/czi_logo.png b/dipy.org/pull/66/_static/images/sponsors/czi_logo.png new file mode 100644 index 0000000..a008138 Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/czi_logo.png differ diff --git a/dipy.org/pull/66/_static/images/sponsors/gsoc.png b/dipy.org/pull/66/_static/images/sponsors/gsoc.png new file mode 100644 index 0000000..4665b4a Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/gsoc.png differ diff --git a/dipy.org/pull/66/_static/images/sponsors/iu.webp b/dipy.org/pull/66/_static/images/sponsors/iu.webp new file mode 100644 index 0000000..c76c512 Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/iu.webp differ diff --git a/dipy.org/pull/66/_static/images/sponsors/luddy.png b/dipy.org/pull/66/_static/images/sponsors/luddy.png new file mode 100644 index 0000000..7c00fc6 Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/luddy.png differ diff --git a/dipy.org/pull/66/_static/images/sponsors/uwescience.png b/dipy.org/pull/66/_static/images/sponsors/uwescience.png new file mode 100644 index 0000000..cf45f39 Binary files /dev/null and b/dipy.org/pull/66/_static/images/sponsors/uwescience.png differ diff --git a/dipy.org/pull/66/_static/images/three_brains_golden_new_small.png b/dipy.org/pull/66/_static/images/three_brains_golden_new_small.png new file mode 100644 index 0000000..cf84099 Binary files /dev/null and b/dipy.org/pull/66/_static/images/three_brains_golden_new_small.png differ diff --git a/dipy.org/pull/66/_static/js/dipy.js b/dipy.org/pull/66/_static/js/dipy.js new file mode 100644 index 0000000..a52b88e --- /dev/null +++ b/dipy.org/pull/66/_static/js/dipy.js @@ -0,0 +1,10 @@ +function subscriptionClick(email) { + const recipient = "dipy@python.org"; + const subject = "subscription to dipy mailing list"; + + const mailtoLink = `mailto:${recipient}?subject=${encodeURIComponent( + subject + )}`; + + window.location.href = mailtoLink; +} diff --git a/dipy.org/pull/66/_static/language_data.js b/dipy.org/pull/66/_static/language_data.js new file mode 100644 index 0000000..367b8ed --- /dev/null +++ b/dipy.org/pull/66/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, 
stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, if available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if 
(re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/dipy.org/pull/66/_static/macros.html b/dipy.org/pull/66/_static/macros.html new file mode 100644 index 0000000..3be4682 --- /dev/null +++ b/dipy.org/pull/66/_static/macros.html @@ -0,0 +1,7 @@ +{% macro head_assets() %} + +{% endmacro %} + +{% macro body_scripts() %} + +{% endmacro %} \ No newline at end of file diff --git a/dipy.org/pull/66/_static/minus.png b/dipy.org/pull/66/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/dipy.org/pull/66/_static/minus.png differ diff --git a/dipy.org/pull/66/_static/plus.png b/dipy.org/pull/66/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/dipy.org/pull/66/_static/plus.png differ diff --git a/dipy.org/pull/66/_static/pygments.css b/dipy.org/pull/66/_static/pygments.css new file mode 100644 index 0000000..012e6a0 --- /dev/null +++ b/dipy.org/pull/66/_static/pygments.css @@ -0,0 +1,152 @@ +html[data-theme="light"] .highlight pre { line-height: 125%; } +html[data-theme="light"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight .hll { background-color: #fae4c2 } +html[data-theme="light"] .highlight { background: #fefefe; color: #080808 } +html[data-theme="light"] .highlight .c { color: #515151 } /* Comment */ +html[data-theme="light"] .highlight .err { color: #a12236 } /* Error */ +html[data-theme="light"] .highlight .k { color: #6730c5 } /* Keyword */ +html[data-theme="light"] .highlight .l { color: #7f4707 } /* Literal */ +html[data-theme="light"] .highlight .n { color: #080808 } /* Name */ +html[data-theme="light"] .highlight .o { color: #00622f } /* Operator */ +html[data-theme="light"] .highlight .p { color: #080808 } /* Punctuation */ +html[data-theme="light"] .highlight .ch { color: #515151 } /* Comment.Hashbang */ +html[data-theme="light"] .highlight .cm { color: #515151 } /* Comment.Multiline */ +html[data-theme="light"] .highlight .cp { color: #515151 } /* Comment.Preproc */ +html[data-theme="light"] .highlight .cpf { color: #515151 } /* Comment.PreprocFile */ +html[data-theme="light"] .highlight .c1 { color: #515151 } /* Comment.Single */ +html[data-theme="light"] .highlight .cs { color: #515151 } /* Comment.Special */ +html[data-theme="light"] .highlight .gd { color: #005b82 } /* Generic.Deleted */ +html[data-theme="light"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="light"] .highlight .gh { color: #005b82 } /* Generic.Heading */ +html[data-theme="light"] 
.highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="light"] .highlight .gu { color: #005b82 } /* Generic.Subheading */ +html[data-theme="light"] .highlight .kc { color: #6730c5 } /* Keyword.Constant */ +html[data-theme="light"] .highlight .kd { color: #6730c5 } /* Keyword.Declaration */ +html[data-theme="light"] .highlight .kn { color: #6730c5 } /* Keyword.Namespace */ +html[data-theme="light"] .highlight .kp { color: #6730c5 } /* Keyword.Pseudo */ +html[data-theme="light"] .highlight .kr { color: #6730c5 } /* Keyword.Reserved */ +html[data-theme="light"] .highlight .kt { color: #7f4707 } /* Keyword.Type */ +html[data-theme="light"] .highlight .ld { color: #7f4707 } /* Literal.Date */ +html[data-theme="light"] .highlight .m { color: #7f4707 } /* Literal.Number */ +html[data-theme="light"] .highlight .s { color: #00622f } /* Literal.String */ +html[data-theme="light"] .highlight .na { color: #912583 } /* Name.Attribute */ +html[data-theme="light"] .highlight .nb { color: #7f4707 } /* Name.Builtin */ +html[data-theme="light"] .highlight .nc { color: #005b82 } /* Name.Class */ +html[data-theme="light"] .highlight .no { color: #005b82 } /* Name.Constant */ +html[data-theme="light"] .highlight .nd { color: #7f4707 } /* Name.Decorator */ +html[data-theme="light"] .highlight .ni { color: #00622f } /* Name.Entity */ +html[data-theme="light"] .highlight .ne { color: #6730c5 } /* Name.Exception */ +html[data-theme="light"] .highlight .nf { color: #005b82 } /* Name.Function */ +html[data-theme="light"] .highlight .nl { color: #7f4707 } /* Name.Label */ +html[data-theme="light"] .highlight .nn { color: #080808 } /* Name.Namespace */ +html[data-theme="light"] .highlight .nx { color: #080808 } /* Name.Other */ +html[data-theme="light"] .highlight .py { color: #005b82 } /* Name.Property */ +html[data-theme="light"] .highlight .nt { color: #005b82 } /* Name.Tag */ +html[data-theme="light"] .highlight .nv { color: #a12236 } /* Name.Variable */ +html[data-theme="light"] .highlight .ow { color: #6730c5 } /* Operator.Word */ +html[data-theme="light"] .highlight .pm { color: #080808 } /* Punctuation.Marker */ +html[data-theme="light"] .highlight .w { color: #080808 } /* Text.Whitespace */ +html[data-theme="light"] .highlight .mb { color: #7f4707 } /* Literal.Number.Bin */ +html[data-theme="light"] .highlight .mf { color: #7f4707 } /* Literal.Number.Float */ +html[data-theme="light"] .highlight .mh { color: #7f4707 } /* Literal.Number.Hex */ +html[data-theme="light"] .highlight .mi { color: #7f4707 } /* Literal.Number.Integer */ +html[data-theme="light"] .highlight .mo { color: #7f4707 } /* Literal.Number.Oct */ +html[data-theme="light"] .highlight .sa { color: #00622f } /* Literal.String.Affix */ +html[data-theme="light"] .highlight .sb { color: #00622f } /* Literal.String.Backtick */ +html[data-theme="light"] .highlight .sc { color: #00622f } /* Literal.String.Char */ +html[data-theme="light"] .highlight .dl { color: #00622f } /* Literal.String.Delimiter */ +html[data-theme="light"] .highlight .sd { color: #00622f } /* Literal.String.Doc */ +html[data-theme="light"] .highlight .s2 { color: #00622f } /* Literal.String.Double */ +html[data-theme="light"] .highlight .se { color: #00622f } /* Literal.String.Escape */ +html[data-theme="light"] .highlight .sh { color: #00622f } /* Literal.String.Heredoc */ +html[data-theme="light"] .highlight .si { color: #00622f } /* Literal.String.Interpol */ +html[data-theme="light"] .highlight .sx { color: #00622f } /* Literal.String.Other */ 
+html[data-theme="light"] .highlight .sr { color: #a12236 } /* Literal.String.Regex */ +html[data-theme="light"] .highlight .s1 { color: #00622f } /* Literal.String.Single */ +html[data-theme="light"] .highlight .ss { color: #005b82 } /* Literal.String.Symbol */ +html[data-theme="light"] .highlight .bp { color: #7f4707 } /* Name.Builtin.Pseudo */ +html[data-theme="light"] .highlight .fm { color: #005b82 } /* Name.Function.Magic */ +html[data-theme="light"] .highlight .vc { color: #a12236 } /* Name.Variable.Class */ +html[data-theme="light"] .highlight .vg { color: #a12236 } /* Name.Variable.Global */ +html[data-theme="light"] .highlight .vi { color: #a12236 } /* Name.Variable.Instance */ +html[data-theme="light"] .highlight .vm { color: #7f4707 } /* Name.Variable.Magic */ +html[data-theme="light"] .highlight .il { color: #7f4707 } /* Literal.Number.Integer.Long */ +html[data-theme="dark"] .highlight pre { line-height: 125%; } +html[data-theme="dark"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight .hll { background-color: #ffd9002e } +html[data-theme="dark"] .highlight { background: #2b2b2b; color: #f8f8f2 } +html[data-theme="dark"] .highlight .c { color: #ffd900 } /* Comment */ +html[data-theme="dark"] .highlight .err { color: #ffa07a } /* Error */ +html[data-theme="dark"] .highlight .k { color: #dcc6e0 } /* Keyword */ +html[data-theme="dark"] .highlight .l { color: #ffd900 } /* Literal */ +html[data-theme="dark"] .highlight .n { color: #f8f8f2 } /* Name */ +html[data-theme="dark"] .highlight .o { color: #abe338 } /* Operator */ +html[data-theme="dark"] .highlight .p { color: #f8f8f2 } /* Punctuation */ +html[data-theme="dark"] .highlight .ch { color: #ffd900 } /* Comment.Hashbang */ +html[data-theme="dark"] .highlight .cm { color: #ffd900 } /* Comment.Multiline */ +html[data-theme="dark"] .highlight .cp { color: #ffd900 } /* Comment.Preproc */ +html[data-theme="dark"] .highlight .cpf { color: #ffd900 } /* Comment.PreprocFile */ +html[data-theme="dark"] .highlight .c1 { color: #ffd900 } /* Comment.Single */ +html[data-theme="dark"] .highlight .cs { color: #ffd900 } /* Comment.Special */ +html[data-theme="dark"] .highlight .gd { color: #00e0e0 } /* Generic.Deleted */ +html[data-theme="dark"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="dark"] .highlight .gh { color: #00e0e0 } /* Generic.Heading */ +html[data-theme="dark"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="dark"] .highlight .gu { color: #00e0e0 } /* Generic.Subheading */ +html[data-theme="dark"] .highlight .kc { color: #dcc6e0 } /* Keyword.Constant */ +html[data-theme="dark"] .highlight .kd { color: #dcc6e0 } /* Keyword.Declaration */ +html[data-theme="dark"] .highlight .kn { color: #dcc6e0 } /* Keyword.Namespace */ +html[data-theme="dark"] .highlight .kp { color: #dcc6e0 } /* Keyword.Pseudo */ +html[data-theme="dark"] .highlight .kr { color: #dcc6e0 } /* Keyword.Reserved */ +html[data-theme="dark"] .highlight .kt { color: #ffd900 } /* Keyword.Type 
*/ +html[data-theme="dark"] .highlight .ld { color: #ffd900 } /* Literal.Date */ +html[data-theme="dark"] .highlight .m { color: #ffd900 } /* Literal.Number */ +html[data-theme="dark"] .highlight .s { color: #abe338 } /* Literal.String */ +html[data-theme="dark"] .highlight .na { color: #ffd900 } /* Name.Attribute */ +html[data-theme="dark"] .highlight .nb { color: #ffd900 } /* Name.Builtin */ +html[data-theme="dark"] .highlight .nc { color: #00e0e0 } /* Name.Class */ +html[data-theme="dark"] .highlight .no { color: #00e0e0 } /* Name.Constant */ +html[data-theme="dark"] .highlight .nd { color: #ffd900 } /* Name.Decorator */ +html[data-theme="dark"] .highlight .ni { color: #abe338 } /* Name.Entity */ +html[data-theme="dark"] .highlight .ne { color: #dcc6e0 } /* Name.Exception */ +html[data-theme="dark"] .highlight .nf { color: #00e0e0 } /* Name.Function */ +html[data-theme="dark"] .highlight .nl { color: #ffd900 } /* Name.Label */ +html[data-theme="dark"] .highlight .nn { color: #f8f8f2 } /* Name.Namespace */ +html[data-theme="dark"] .highlight .nx { color: #f8f8f2 } /* Name.Other */ +html[data-theme="dark"] .highlight .py { color: #00e0e0 } /* Name.Property */ +html[data-theme="dark"] .highlight .nt { color: #00e0e0 } /* Name.Tag */ +html[data-theme="dark"] .highlight .nv { color: #ffa07a } /* Name.Variable */ +html[data-theme="dark"] .highlight .ow { color: #dcc6e0 } /* Operator.Word */ +html[data-theme="dark"] .highlight .pm { color: #f8f8f2 } /* Punctuation.Marker */ +html[data-theme="dark"] .highlight .w { color: #f8f8f2 } /* Text.Whitespace */ +html[data-theme="dark"] .highlight .mb { color: #ffd900 } /* Literal.Number.Bin */ +html[data-theme="dark"] .highlight .mf { color: #ffd900 } /* Literal.Number.Float */ +html[data-theme="dark"] .highlight .mh { color: #ffd900 } /* Literal.Number.Hex */ +html[data-theme="dark"] .highlight .mi { color: #ffd900 } /* Literal.Number.Integer */ +html[data-theme="dark"] .highlight .mo { color: #ffd900 } /* Literal.Number.Oct */ +html[data-theme="dark"] .highlight .sa { color: #abe338 } /* Literal.String.Affix */ +html[data-theme="dark"] .highlight .sb { color: #abe338 } /* Literal.String.Backtick */ +html[data-theme="dark"] .highlight .sc { color: #abe338 } /* Literal.String.Char */ +html[data-theme="dark"] .highlight .dl { color: #abe338 } /* Literal.String.Delimiter */ +html[data-theme="dark"] .highlight .sd { color: #abe338 } /* Literal.String.Doc */ +html[data-theme="dark"] .highlight .s2 { color: #abe338 } /* Literal.String.Double */ +html[data-theme="dark"] .highlight .se { color: #abe338 } /* Literal.String.Escape */ +html[data-theme="dark"] .highlight .sh { color: #abe338 } /* Literal.String.Heredoc */ +html[data-theme="dark"] .highlight .si { color: #abe338 } /* Literal.String.Interpol */ +html[data-theme="dark"] .highlight .sx { color: #abe338 } /* Literal.String.Other */ +html[data-theme="dark"] .highlight .sr { color: #ffa07a } /* Literal.String.Regex */ +html[data-theme="dark"] .highlight .s1 { color: #abe338 } /* Literal.String.Single */ +html[data-theme="dark"] .highlight .ss { color: #00e0e0 } /* Literal.String.Symbol */ +html[data-theme="dark"] .highlight .bp { color: #ffd900 } /* Name.Builtin.Pseudo */ +html[data-theme="dark"] .highlight .fm { color: #00e0e0 } /* Name.Function.Magic */ +html[data-theme="dark"] .highlight .vc { color: #ffa07a } /* Name.Variable.Class */ +html[data-theme="dark"] .highlight .vg { color: #ffa07a } /* Name.Variable.Global */ +html[data-theme="dark"] .highlight .vi { color: #ffa07a } /* 
Name.Variable.Instance */ +html[data-theme="dark"] .highlight .vm { color: #ffd900 } /* Name.Variable.Magic */ +html[data-theme="dark"] .highlight .il { color: #ffd900 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/dipy.org/pull/66/_static/scripts/bootstrap.js b/dipy.org/pull/66/_static/scripts/bootstrap.js new file mode 100644 index 0000000..c8178de --- /dev/null +++ b/dipy.org/pull/66/_static/scripts/bootstrap.js @@ -0,0 +1,3 @@ +/*! For license information please see bootstrap.js.LICENSE.txt */ +(()=>{"use strict";var t={d:(e,i)=>{for(var n in i)t.o(i,n)&&!t.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:i[n]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{afterMain:()=>E,afterRead:()=>v,afterWrite:()=>C,applyStyles:()=>$,arrow:()=>J,auto:()=>a,basePlacements:()=>l,beforeMain:()=>y,beforeRead:()=>_,beforeWrite:()=>A,bottom:()=>s,clippingParents:()=>d,computeStyles:()=>it,createPopper:()=>Dt,createPopperBase:()=>St,createPopperLite:()=>$t,detectOverflow:()=>_t,end:()=>h,eventListeners:()=>st,flip:()=>bt,hide:()=>wt,left:()=>r,main:()=>w,modifierPhases:()=>O,offset:()=>Et,placements:()=>g,popper:()=>f,popperGenerator:()=>Lt,popperOffsets:()=>At,preventOverflow:()=>Tt,read:()=>b,reference:()=>p,right:()=>o,start:()=>c,top:()=>n,variationPlacements:()=>m,viewport:()=>u,write:()=>T});var i={};t.r(i),t.d(i,{Alert:()=>Oe,Button:()=>ke,Carousel:()=>li,Collapse:()=>Ei,Dropdown:()=>Ki,Modal:()=>Ln,Offcanvas:()=>Kn,Popover:()=>bs,ScrollSpy:()=>Ls,Tab:()=>Js,Toast:()=>po,Tooltip:()=>fs});var n="top",s="bottom",o="right",r="left",a="auto",l=[n,s,o,r],c="start",h="end",d="clippingParents",u="viewport",f="popper",p="reference",m=l.reduce((function(t,e){return t.concat([e+"-"+c,e+"-"+h])}),[]),g=[].concat(l,[a]).reduce((function(t,e){return t.concat([e,e+"-"+c,e+"-"+h])}),[]),_="beforeRead",b="read",v="afterRead",y="beforeMain",w="main",E="afterMain",A="beforeWrite",T="write",C="afterWrite",O=[_,b,v,y,w,E,A,T,C];function x(t){return t?(t.nodeName||"").toLowerCase():null}function k(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function L(t){return t instanceof k(t).Element||t instanceof Element}function S(t){return t instanceof k(t).HTMLElement||t instanceof HTMLElement}function D(t){return"undefined"!=typeof ShadowRoot&&(t instanceof k(t).ShadowRoot||t instanceof ShadowRoot)}const $={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];S(s)&&x(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return 
t[e]="",t}),{});S(n)&&x(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function I(t){return t.split("-")[0]}var N=Math.max,P=Math.min,M=Math.round;function j(){var t=navigator.userAgentData;return null!=t&&t.brands&&Array.isArray(t.brands)?t.brands.map((function(t){return t.brand+"/"+t.version})).join(" "):navigator.userAgent}function F(){return!/^((?!chrome|android).)*safari/i.test(j())}function H(t,e,i){void 0===e&&(e=!1),void 0===i&&(i=!1);var n=t.getBoundingClientRect(),s=1,o=1;e&&S(t)&&(s=t.offsetWidth>0&&M(n.width)/t.offsetWidth||1,o=t.offsetHeight>0&&M(n.height)/t.offsetHeight||1);var r=(L(t)?k(t):window).visualViewport,a=!F()&&i,l=(n.left+(a&&r?r.offsetLeft:0))/s,c=(n.top+(a&&r?r.offsetTop:0))/o,h=n.width/s,d=n.height/o;return{width:h,height:d,top:c,right:l+h,bottom:c+d,left:l,x:l,y:c}}function B(t){var e=H(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function W(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&D(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function z(t){return k(t).getComputedStyle(t)}function R(t){return["table","td","th"].indexOf(x(t))>=0}function q(t){return((L(t)?t.ownerDocument:t.document)||window.document).documentElement}function V(t){return"html"===x(t)?t:t.assignedSlot||t.parentNode||(D(t)?t.host:null)||q(t)}function Y(t){return S(t)&&"fixed"!==z(t).position?t.offsetParent:null}function K(t){for(var e=k(t),i=Y(t);i&&R(i)&&"static"===z(i).position;)i=Y(i);return i&&("html"===x(i)||"body"===x(i)&&"static"===z(i).position)?e:i||function(t){var e=/firefox/i.test(j());if(/Trident/i.test(j())&&S(t)&&"fixed"===z(t).position)return null;var i=V(t);for(D(i)&&(i=i.host);S(i)&&["html","body"].indexOf(x(i))<0;){var n=z(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function Q(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}function X(t,e,i){return N(t,P(e,i))}function U(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function G(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const J={name:"arrow",enabled:!0,phase:"main",fn:function(t){var e,i=t.state,a=t.name,c=t.options,h=i.elements.arrow,d=i.modifiersData.popperOffsets,u=I(i.placement),f=Q(u),p=[r,o].indexOf(u)>=0?"height":"width";if(h&&d){var m=function(t,e){return U("number"!=typeof(t="function"==typeof t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:G(t,l))}(c.padding,i),g=B(h),_="y"===f?n:r,b="y"===f?s:o,v=i.rects.reference[p]+i.rects.reference[f]-d[f]-i.rects.popper[p],y=d[f]-i.rects.reference[f],w=K(h),E=w?"y"===f?w.clientHeight||0:w.clientWidth||0:0,A=v/2-y/2,T=m[_],C=E-g[p]-m[b],O=E/2-g[p]/2+A,x=X(T,O,C),k=f;i.modifiersData[a]=((e={})[k]=x,e.centerOffset=x-O,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&W(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function Z(t){return t.split("-")[1]}var tt={top:"auto",right:"auto",bottom:"auto",left:"auto"};function et(t){var 
e,i=t.popper,a=t.popperRect,l=t.placement,c=t.variation,d=t.offsets,u=t.position,f=t.gpuAcceleration,p=t.adaptive,m=t.roundOffsets,g=t.isFixed,_=d.x,b=void 0===_?0:_,v=d.y,y=void 0===v?0:v,w="function"==typeof m?m({x:b,y}):{x:b,y};b=w.x,y=w.y;var E=d.hasOwnProperty("x"),A=d.hasOwnProperty("y"),T=r,C=n,O=window;if(p){var x=K(i),L="clientHeight",S="clientWidth";x===k(i)&&"static"!==z(x=q(i)).position&&"absolute"===u&&(L="scrollHeight",S="scrollWidth"),(l===n||(l===r||l===o)&&c===h)&&(C=s,y-=(g&&x===O&&O.visualViewport?O.visualViewport.height:x[L])-a.height,y*=f?1:-1),l!==r&&(l!==n&&l!==s||c!==h)||(T=o,b-=(g&&x===O&&O.visualViewport?O.visualViewport.width:x[S])-a.width,b*=f?1:-1)}var D,$=Object.assign({position:u},p&&tt),I=!0===m?function(t,e){var i=t.x,n=t.y,s=e.devicePixelRatio||1;return{x:M(i*s)/s||0,y:M(n*s)/s||0}}({x:b,y},k(i)):{x:b,y};return b=I.x,y=I.y,f?Object.assign({},$,((D={})[C]=A?"0":"",D[T]=E?"0":"",D.transform=(O.devicePixelRatio||1)<=1?"translate("+b+"px, "+y+"px)":"translate3d("+b+"px, "+y+"px, 0)",D)):Object.assign({},$,((e={})[C]=A?y+"px":"",e[T]=E?b+"px":"",e.transform="",e))}const it={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:I(e.placement),variation:Z(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s,isFixed:"fixed"===e.options.strategy};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,et(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,et(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var nt={passive:!0};const st={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=k(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,nt)})),a&&l.addEventListener("resize",i.update,nt),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,nt)})),a&&l.removeEventListener("resize",i.update,nt)}},data:{}};var ot={left:"right",right:"left",bottom:"top",top:"bottom"};function rt(t){return t.replace(/left|right|bottom|top/g,(function(t){return ot[t]}))}var at={start:"end",end:"start"};function lt(t){return t.replace(/start|end/g,(function(t){return at[t]}))}function ct(t){var e=k(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ht(t){return H(q(t)).left+ct(t).scrollLeft}function dt(t){var e=z(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function ut(t){return["html","body","#document"].indexOf(x(t))>=0?t.ownerDocument.body:S(t)&&dt(t)?t:ut(V(t))}function ft(t,e){var i;void 0===e&&(e=[]);var n=ut(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=k(n),r=s?[o].concat(o.visualViewport||[],dt(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(ft(V(r)))}function pt(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function mt(t,e,i){return e===u?pt(function(t,e){var 
i=k(t),n=q(t),s=i.visualViewport,o=n.clientWidth,r=n.clientHeight,a=0,l=0;if(s){o=s.width,r=s.height;var c=F();(c||!c&&"fixed"===e)&&(a=s.offsetLeft,l=s.offsetTop)}return{width:o,height:r,x:a+ht(t),y:l}}(t,i)):L(e)?function(t,e){var i=H(t,!1,"fixed"===e);return i.top=i.top+t.clientTop,i.left=i.left+t.clientLeft,i.bottom=i.top+t.clientHeight,i.right=i.left+t.clientWidth,i.width=t.clientWidth,i.height=t.clientHeight,i.x=i.left,i.y=i.top,i}(e,i):pt(function(t){var e,i=q(t),n=ct(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=N(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=N(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ht(t),l=-n.scrollTop;return"rtl"===z(s||i).direction&&(a+=N(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(q(t)))}function gt(t){var e,i=t.reference,a=t.element,l=t.placement,d=l?I(l):null,u=l?Z(l):null,f=i.x+i.width/2-a.width/2,p=i.y+i.height/2-a.height/2;switch(d){case n:e={x:f,y:i.y-a.height};break;case s:e={x:f,y:i.y+i.height};break;case o:e={x:i.x+i.width,y:p};break;case r:e={x:i.x-a.width,y:p};break;default:e={x:i.x,y:i.y}}var m=d?Q(d):null;if(null!=m){var g="y"===m?"height":"width";switch(u){case c:e[m]=e[m]-(i[g]/2-a[g]/2);break;case h:e[m]=e[m]+(i[g]/2-a[g]/2)}}return e}function _t(t,e){void 0===e&&(e={});var i=e,r=i.placement,a=void 0===r?t.placement:r,c=i.strategy,h=void 0===c?t.strategy:c,m=i.boundary,g=void 0===m?d:m,_=i.rootBoundary,b=void 0===_?u:_,v=i.elementContext,y=void 0===v?f:v,w=i.altBoundary,E=void 0!==w&&w,A=i.padding,T=void 0===A?0:A,C=U("number"!=typeof T?T:G(T,l)),O=y===f?p:f,k=t.rects.popper,D=t.elements[E?O:y],$=function(t,e,i,n){var s="clippingParents"===e?function(t){var e=ft(V(t)),i=["absolute","fixed"].indexOf(z(t).position)>=0&&S(t)?K(t):t;return L(i)?e.filter((function(t){return L(t)&&W(t,i)&&"body"!==x(t)})):[]}(t):[].concat(e),o=[].concat(s,[i]),r=o[0],a=o.reduce((function(e,i){var s=mt(t,i,n);return e.top=N(s.top,e.top),e.right=P(s.right,e.right),e.bottom=P(s.bottom,e.bottom),e.left=N(s.left,e.left),e}),mt(t,r,n));return a.width=a.right-a.left,a.height=a.bottom-a.top,a.x=a.left,a.y=a.top,a}(L(D)?D:D.contextElement||q(t.elements.popper),g,b,h),I=H(t.elements.reference),M=gt({reference:I,element:k,strategy:"absolute",placement:a}),j=pt(Object.assign({},k,M)),F=y===f?j:I,B={top:$.top-F.top+C.top,bottom:F.bottom-$.bottom+C.bottom,left:$.left-F.left+C.left,right:F.right-$.right+C.right},R=t.modifiersData.offset;if(y===f&&R){var Y=R[a];Object.keys(B).forEach((function(t){var e=[o,s].indexOf(t)>=0?1:-1,i=[n,s].indexOf(t)>=0?"y":"x";B[t]+=Y[i]*e}))}return B}const bt={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,h=t.name;if(!e.modifiersData[h]._skip){for(var d=i.mainAxis,u=void 0===d||d,f=i.altAxis,p=void 0===f||f,_=i.fallbackPlacements,b=i.padding,v=i.boundary,y=i.rootBoundary,w=i.altBoundary,E=i.flipVariations,A=void 0===E||E,T=i.allowedAutoPlacements,C=e.options.placement,O=I(C),x=_||(O!==C&&A?function(t){if(I(t)===a)return[];var e=rt(t);return[lt(t),e,lt(e)]}(C):[rt(C)]),k=[C].concat(x).reduce((function(t,i){return t.concat(I(i)===a?function(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,c=i.allowedAutoPlacements,h=void 0===c?g:c,d=Z(n),u=d?a?m:m.filter((function(t){return Z(t)===d})):l,f=u.filter((function(t){return h.indexOf(t)>=0}));0===f.length&&(f=u);var p=f.reduce((function(e,i){return e[i]=_t(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[I(i)],e}),{});return 
Object.keys(p).sort((function(t,e){return p[t]-p[e]}))}(e,{placement:i,boundary:v,rootBoundary:y,padding:b,flipVariations:A,allowedAutoPlacements:T}):i)}),[]),L=e.rects.reference,S=e.rects.popper,D=new Map,$=!0,N=k[0],P=0;P=0,B=H?"width":"height",W=_t(e,{placement:M,boundary:v,rootBoundary:y,altBoundary:w,padding:b}),z=H?F?o:r:F?s:n;L[B]>S[B]&&(z=rt(z));var R=rt(z),q=[];if(u&&q.push(W[j]<=0),p&&q.push(W[z]<=0,W[R]<=0),q.every((function(t){return t}))){N=M,$=!1;break}D.set(M,q)}if($)for(var V=function(t){var e=k.find((function(e){var i=D.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return N=e,"break"},Y=A?3:1;Y>0&&"break"!==V(Y);Y--);e.placement!==N&&(e.modifiersData[h]._skip=!0,e.placement=N,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function vt(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function yt(t){return[n,o,s,r].some((function(e){return t[e]>=0}))}const wt={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=_t(e,{elementContext:"reference"}),a=_t(e,{altBoundary:!0}),l=vt(r,n),c=vt(a,s,o),h=yt(l),d=yt(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},Et={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,s=t.name,a=i.offset,l=void 0===a?[0,0]:a,c=g.reduce((function(t,i){return t[i]=function(t,e,i){var s=I(t),a=[r,n].indexOf(s)>=0?-1:1,l="function"==typeof i?i(Object.assign({},e,{placement:t})):i,c=l[0],h=l[1];return c=c||0,h=(h||0)*a,[r,o].indexOf(s)>=0?{x:h,y:c}:{x:c,y:h}}(i,e.rects,l),t}),{}),h=c[e.placement],d=h.x,u=h.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=d,e.modifiersData.popperOffsets.y+=u),e.modifiersData[s]=c}},At={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=gt({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},Tt={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,a=t.name,l=i.mainAxis,h=void 0===l||l,d=i.altAxis,u=void 0!==d&&d,f=i.boundary,p=i.rootBoundary,m=i.altBoundary,g=i.padding,_=i.tether,b=void 0===_||_,v=i.tetherOffset,y=void 0===v?0:v,w=_t(e,{boundary:f,rootBoundary:p,padding:g,altBoundary:m}),E=I(e.placement),A=Z(e.placement),T=!A,C=Q(E),O="x"===C?"y":"x",x=e.modifiersData.popperOffsets,k=e.rects.reference,L=e.rects.popper,S="function"==typeof y?y(Object.assign({},e.rects,{placement:e.placement})):y,D="number"==typeof S?{mainAxis:S,altAxis:S}:Object.assign({mainAxis:0,altAxis:0},S),$=e.modifiersData.offset?e.modifiersData.offset[e.placement]:null,M={x:0,y:0};if(x){if(h){var 
j,F="y"===C?n:r,H="y"===C?s:o,W="y"===C?"height":"width",z=x[C],R=z+w[F],q=z-w[H],V=b?-L[W]/2:0,Y=A===c?k[W]:L[W],U=A===c?-L[W]:-k[W],G=e.elements.arrow,J=b&&G?B(G):{width:0,height:0},tt=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},et=tt[F],it=tt[H],nt=X(0,k[W],J[W]),st=T?k[W]/2-V-nt-et-D.mainAxis:Y-nt-et-D.mainAxis,ot=T?-k[W]/2+V+nt+it+D.mainAxis:U+nt+it+D.mainAxis,rt=e.elements.arrow&&K(e.elements.arrow),at=rt?"y"===C?rt.clientTop||0:rt.clientLeft||0:0,lt=null!=(j=null==$?void 0:$[C])?j:0,ct=z+ot-lt,ht=X(b?P(R,z+st-lt-at):R,z,b?N(q,ct):q);x[C]=ht,M[C]=ht-z}if(u){var dt,ut="x"===C?n:r,ft="x"===C?s:o,pt=x[O],mt="y"===O?"height":"width",gt=pt+w[ut],bt=pt-w[ft],vt=-1!==[n,r].indexOf(E),yt=null!=(dt=null==$?void 0:$[O])?dt:0,wt=vt?gt:pt-k[mt]-L[mt]-yt+D.altAxis,Et=vt?pt+k[mt]+L[mt]-yt-D.altAxis:bt,At=b&&vt?function(t,e,i){var n=X(t,e,i);return n>i?i:n}(wt,pt,Et):X(b?wt:gt,pt,b?Et:bt);x[O]=At,M[O]=At-pt}e.modifiersData[a]=M}},requiresIfExists:["offset"]};function Ct(t,e,i){void 0===i&&(i=!1);var n,s,o=S(e),r=S(e)&&function(t){var e=t.getBoundingClientRect(),i=M(e.width)/t.offsetWidth||1,n=M(e.height)/t.offsetHeight||1;return 1!==i||1!==n}(e),a=q(e),l=H(t,r,i),c={scrollLeft:0,scrollTop:0},h={x:0,y:0};return(o||!o&&!i)&&(("body"!==x(e)||dt(a))&&(c=(n=e)!==k(n)&&S(n)?{scrollLeft:(s=n).scrollLeft,scrollTop:s.scrollTop}:ct(n)),S(e)?((h=H(e,!0)).x+=e.clientLeft,h.y+=e.clientTop):a&&(h.x=ht(a))),{x:l.left+c.scrollLeft-h.x,y:l.top+c.scrollTop-h.y,width:l.width,height:l.height}}function Ot(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var xt={placement:"bottom",modifiers:[],strategy:"absolute"};function kt(){for(var t=arguments.length,e=new Array(t),i=0;iIt.has(t)&&It.get(t).get(e)||null,remove(t,e){if(!It.has(t))return;const i=It.get(t);i.delete(e),0===i.size&&It.delete(t)}},Pt="transitionend",Mt=t=>(t&&window.CSS&&window.CSS.escape&&(t=t.replace(/#([^\s"#']+)/g,((t,e)=>`#${CSS.escape(e)}`))),t),jt=t=>{t.dispatchEvent(new Event(Pt))},Ft=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),Ht=t=>Ft(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(Mt(t)):null,Bt=t=>{if(!Ft(t)||0===t.getClientRects().length)return!1;const e="visible"===getComputedStyle(t).getPropertyValue("visibility"),i=t.closest("details:not([open])");if(!i)return e;if(i!==t){const e=t.closest("summary");if(e&&e.parentNode!==i)return!1;if(null===e)return!1}return e},Wt=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),zt=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?zt(t.parentNode):null},Rt=()=>{},qt=t=>{t.offsetHeight},Vt=()=>window.jQuery&&!document.body.hasAttribute("data-bs-no-jquery")?window.jQuery:null,Yt=[],Kt=()=>"rtl"===document.documentElement.dir,Qt=t=>{var e;e=()=>{const e=Vt();if(e){const 
i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(Yt.length||document.addEventListener("DOMContentLoaded",(()=>{for(const t of Yt)t()})),Yt.push(e)):e()},Xt=(t,e=[],i=t)=>"function"==typeof t?t(...e):i,Ut=(t,e,i=!0)=>{if(!i)return void Xt(t);const n=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let s=!1;const o=({target:i})=>{i===e&&(s=!0,e.removeEventListener(Pt,o),Xt(t))};e.addEventListener(Pt,o),setTimeout((()=>{s||jt(e)}),n)},Gt=(t,e,i,n)=>{const s=t.length;let o=t.indexOf(e);return-1===o?!i&&n?t[s-1]:t[0]:(o+=i?1:-1,n&&(o=(o+s)%s),t[Math.max(0,Math.min(o,s-1))])},Jt=/[^.]*(?=\..*)\.|.*/,Zt=/\..*/,te=/::\d+$/,ee={};let ie=1;const ne={mouseenter:"mouseover",mouseleave:"mouseout"},se=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function oe(t,e){return e&&`${e}::${ie++}`||t.uidEvent||ie++}function re(t){const e=oe(t);return t.uidEvent=e,ee[e]=ee[e]||{},ee[e]}function ae(t,e,i=null){return Object.values(t).find((t=>t.callable===e&&t.delegationSelector===i))}function le(t,e,i){const n="string"==typeof e,s=n?i:e||i;let o=ue(t);return se.has(o)||(o=t),[n,s,o]}function ce(t,e,i,n,s){if("string"!=typeof e||!t)return;let[o,r,a]=le(e,i,n);if(e in ne){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};r=t(r)}const l=re(t),c=l[a]||(l[a]={}),h=ae(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=oe(r,e.replace(Jt,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(const a of o)if(a===r)return pe(s,{delegateTarget:r}),n.oneOff&&fe.off(t,s.type,e,i),i.apply(r,[s])}}(t,i,r):function(t,e){return function i(n){return pe(n,{delegateTarget:t}),i.oneOff&&fe.off(t,n.type,e),e.apply(t,[n])}}(t,r);u.delegationSelector=o?i:null,u.callable=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function he(t,e,i,n,s){const o=ae(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function de(t,e,i,n){const s=e[i]||{};for(const[o,r]of Object.entries(s))o.includes(n)&&he(t,e,i,r.callable,r.delegationSelector)}function ue(t){return t=t.replace(Zt,""),ne[t]||t}const fe={on(t,e,i,n){ce(t,e,i,n,!1)},one(t,e,i,n){ce(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=le(e,i,n),a=r!==e,l=re(t),c=l[r]||{},h=e.startsWith(".");if(void 0===o){if(h)for(const i of Object.keys(l))de(t,l,i,e.slice(1));for(const[i,n]of Object.entries(c)){const s=i.replace(te,"");a&&!e.includes(s)||he(t,l,r,n.callable,n.delegationSelector)}}else{if(!Object.keys(c).length)return;he(t,l,r,o,s?i:null)}},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=Vt();let 
s=null,o=!0,r=!0,a=!1;e!==ue(e)&&n&&(s=n.Event(e,i),n(t).trigger(s),o=!s.isPropagationStopped(),r=!s.isImmediatePropagationStopped(),a=s.isDefaultPrevented());const l=pe(new Event(e,{bubbles:o,cancelable:!0}),i);return a&&l.preventDefault(),r&&t.dispatchEvent(l),l.defaultPrevented&&s&&s.preventDefault(),l}};function pe(t,e={}){for(const[i,n]of Object.entries(e))try{t[i]=n}catch(e){Object.defineProperty(t,i,{configurable:!0,get:()=>n})}return t}function me(t){if("true"===t)return!0;if("false"===t)return!1;if(t===Number(t).toString())return Number(t);if(""===t||"null"===t)return null;if("string"!=typeof t)return t;try{return JSON.parse(decodeURIComponent(t))}catch(e){return t}}function ge(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}const _e={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${ge(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${ge(e)}`)},getDataAttributes(t){if(!t)return{};const e={},i=Object.keys(t.dataset).filter((t=>t.startsWith("bs")&&!t.startsWith("bsConfig")));for(const n of i){let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),e[i]=me(t.dataset[n])}return e},getDataAttribute:(t,e)=>me(t.getAttribute(`data-bs-${ge(e)}`))};class be{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){const i=Ft(e)?_e.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof i?i:{},...Ft(e)?_e.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t,e=this.constructor.DefaultType){for(const[n,s]of Object.entries(e)){const e=t[n],o=Ft(e)?"element":null==(i=e)?`${i}`:Object.prototype.toString.call(i).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(s).test(o))throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option "${n}" provided type "${o}" but expected type "${s}".`)}var i}}class ve extends be{constructor(t,e){super(),(t=Ht(t))&&(this._element=t,this._config=this._getConfig(e),Nt.set(this._element,this.constructor.DATA_KEY,this))}dispose(){Nt.remove(this._element,this.constructor.DATA_KEY),fe.off(this._element,this.constructor.EVENT_KEY);for(const t of Object.getOwnPropertyNames(this))this[t]=null}_queueCallback(t,e,i=!0){Ut(t,e,i)}_getConfig(t){return t=this._mergeConfigObj(t,this._element),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}static getInstance(t){return Nt.get(Ht(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.3.3"}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}static eventName(t){return`${t}${this.EVENT_KEY}`}}const ye=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return e?e.split(",").map((t=>Mt(t))).join(","):null},we={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode.closest(e);for(;n;)i.push(n),n=n.parentNode.closest(e);return 
i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(",");return this.find(e,t).filter((t=>!Wt(t)&&Bt(t)))},getSelectorFromElement(t){const e=ye(t);return e&&we.findOne(e)?e:null},getElementFromSelector(t){const e=ye(t);return e?we.findOne(e):null},getMultipleElementsFromSelector(t){const e=ye(t);return e?we.find(e):[]}},Ee=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,n=t.NAME;fe.on(document,i,`[data-bs-dismiss="${n}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),Wt(this))return;const s=we.getElementFromSelector(this)||this.closest(`.${n}`);t.getOrCreateInstance(s)[e]()}))},Ae=".bs.alert",Te=`close${Ae}`,Ce=`closed${Ae}`;class Oe extends ve{static get NAME(){return"alert"}close(){if(fe.trigger(this._element,Te).defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),fe.trigger(this._element,Ce),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=Oe.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}Ee(Oe,"close"),Qt(Oe);const xe='[data-bs-toggle="button"]';class ke extends ve{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=ke.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}fe.on(document,"click.bs.button.data-api",xe,(t=>{t.preventDefault();const e=t.target.closest(xe);ke.getOrCreateInstance(e).toggle()})),Qt(ke);const Le=".bs.swipe",Se=`touchstart${Le}`,De=`touchmove${Le}`,$e=`touchend${Le}`,Ie=`pointerdown${Le}`,Ne=`pointerup${Le}`,Pe={endCallback:null,leftCallback:null,rightCallback:null},Me={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class je extends be{constructor(t,e){super(),this._element=t,t&&je.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return Pe}static get DefaultType(){return Me}static get NAME(){return"swipe"}dispose(){fe.off(this._element,Le)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),Xt(this._config.endCallback)}_move(t){this._deltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this._deltaX}_handleSwipe(){const t=Math.abs(this._deltaX);if(t<=40)return;const e=t/this._deltaX;this._deltaX=0,e&&Xt(e>0?this._config.rightCallback:this._config.leftCallback)}_initEvents(){this._supportPointerEvents?(fe.on(this._element,Ie,(t=>this._start(t))),fe.on(this._element,Ne,(t=>this._end(t))),this._element.classList.add("pointer-event")):(fe.on(this._element,Se,(t=>this._start(t))),fe.on(this._element,De,(t=>this._move(t))),fe.on(this._element,$e,(t=>this._end(t))))}_eventIsPointerPenTouch(t){return 
this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static isSupported(){return"ontouchstart"in document.documentElement||navigator.maxTouchPoints>0}}const Fe=".bs.carousel",He=".data-api",Be="ArrowLeft",We="ArrowRight",ze="next",Re="prev",qe="left",Ve="right",Ye=`slide${Fe}`,Ke=`slid${Fe}`,Qe=`keydown${Fe}`,Xe=`mouseenter${Fe}`,Ue=`mouseleave${Fe}`,Ge=`dragstart${Fe}`,Je=`load${Fe}${He}`,Ze=`click${Fe}${He}`,ti="carousel",ei="active",ii=".active",ni=".carousel-item",si=ii+ni,oi={[Be]:Ve,[We]:qe},ri={interval:5e3,keyboard:!0,pause:"hover",ride:!1,touch:!0,wrap:!0},ai={interval:"(number|boolean)",keyboard:"boolean",pause:"(string|boolean)",ride:"(boolean|string)",touch:"boolean",wrap:"boolean"};class li extends ve{constructor(t,e){super(t,e),this._interval=null,this._activeElement=null,this._isSliding=!1,this.touchTimeout=null,this._swipeHelper=null,this._indicatorsElement=we.findOne(".carousel-indicators",this._element),this._addEventListeners(),this._config.ride===ti&&this.cycle()}static get Default(){return ri}static get DefaultType(){return ai}static get NAME(){return"carousel"}next(){this._slide(ze)}nextWhenVisible(){!document.hidden&&Bt(this._element)&&this.next()}prev(){this._slide(Re)}pause(){this._isSliding&&jt(this._element),this._clearInterval()}cycle(){this._clearInterval(),this._updateInterval(),this._interval=setInterval((()=>this.nextWhenVisible()),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?fe.one(this._element,Ke,(()=>this.cycle())):this.cycle())}to(t){const e=this._getItems();if(t>e.length-1||t<0)return;if(this._isSliding)return void fe.one(this._element,Ke,(()=>this.to(t)));const i=this._getItemIndex(this._getActive());if(i===t)return;const n=t>i?ze:Re;this._slide(n,e[t])}dispose(){this._swipeHelper&&this._swipeHelper.dispose(),super.dispose()}_configAfterMerge(t){return t.defaultInterval=t.interval,t}_addEventListeners(){this._config.keyboard&&fe.on(this._element,Qe,(t=>this._keydown(t))),"hover"===this._config.pause&&(fe.on(this._element,Xe,(()=>this.pause())),fe.on(this._element,Ue,(()=>this._maybeEnableCycle()))),this._config.touch&&je.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of we.find(".carousel-item img",this._element))fe.on(t,Ge,(t=>t.preventDefault()));const t={leftCallback:()=>this._slide(this._directionToOrder(qe)),rightCallback:()=>this._slide(this._directionToOrder(Ve)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((()=>this._maybeEnableCycle()),500+this._config.interval))}};this._swipeHelper=new je(this._element,t)}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=oi[t.key];e&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){if(!this._indicatorsElement)return;const e=we.findOne(ii,this._indicatorsElement);e.classList.remove(ei),e.removeAttribute("aria-current");const i=we.findOne(`[data-bs-slide-to="${t}"]`,this._indicatorsElement);i&&(i.classList.add(ei),i.setAttribute("aria-current","true"))}_updateInterval(){const t=this._activeElement||this._getActive();if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);this._config.interval=e||this._config.defaultInterval}_slide(t,e=null){if(this._isSliding)return;const i=this._getActive(),n=t===ze,s=e||Gt(this._getItems(),i,n,this._config.wrap);if(s===i)return;const 
o=this._getItemIndex(s),r=e=>fe.trigger(this._element,e,{relatedTarget:s,direction:this._orderToDirection(t),from:this._getItemIndex(i),to:o});if(r(Ye).defaultPrevented)return;if(!i||!s)return;const a=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(o),this._activeElement=s;const l=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";s.classList.add(c),qt(s),i.classList.add(l),s.classList.add(l),this._queueCallback((()=>{s.classList.remove(l,c),s.classList.add(ei),i.classList.remove(ei,c,l),this._isSliding=!1,r(Ke)}),i,this._isAnimated()),a&&this.cycle()}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return we.findOne(si,this._element)}_getItems(){return we.find(ni,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return Kt()?t===qe?Re:ze:t===qe?ze:Re}_orderToDirection(t){return Kt()?t===Re?qe:Ve:t===Re?Ve:qe}static jQueryInterface(t){return this.each((function(){const e=li.getOrCreateInstance(this,t);if("number"!=typeof t){if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}else e.to(t)}))}}fe.on(document,Ze,"[data-bs-slide], [data-bs-slide-to]",(function(t){const e=we.getElementFromSelector(this);if(!e||!e.classList.contains(ti))return;t.preventDefault();const i=li.getOrCreateInstance(e),n=this.getAttribute("data-bs-slide-to");return n?(i.to(n),void i._maybeEnableCycle()):"next"===_e.getDataAttribute(this,"slide")?(i.next(),void i._maybeEnableCycle()):(i.prev(),void i._maybeEnableCycle())})),fe.on(window,Je,(()=>{const t=we.find('[data-bs-ride="carousel"]');for(const e of t)li.getOrCreateInstance(e)})),Qt(li);const ci=".bs.collapse",hi=`show${ci}`,di=`shown${ci}`,ui=`hide${ci}`,fi=`hidden${ci}`,pi=`click${ci}.data-api`,mi="show",gi="collapse",_i="collapsing",bi=`:scope .${gi} .${gi}`,vi='[data-bs-toggle="collapse"]',yi={parent:null,toggle:!0},wi={parent:"(null|element)",toggle:"boolean"};class Ei extends ve{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];const i=we.find(vi);for(const t of i){const e=we.getSelectorFromElement(t),i=we.find(e).filter((t=>t===this._element));null!==e&&i.length&&this._triggerArray.push(t)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return yi}static get DefaultType(){return wi}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t=[];if(this._config.parent&&(t=this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter((t=>t!==this._element)).map((t=>Ei.getOrCreateInstance(t,{toggle:!1})))),t.length&&t[0]._isTransitioning)return;if(fe.trigger(this._element,hi).defaultPrevented)return;for(const e of t)e.hide();const e=this._getDimension();this._element.classList.remove(gi),this._element.classList.add(_i),this._element.style[e]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const 
i=`scroll${e[0].toUpperCase()+e.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(_i),this._element.classList.add(gi,mi),this._element.style[e]="",fe.trigger(this._element,di)}),this._element,!0),this._element.style[e]=`${this._element[i]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(fe.trigger(this._element,ui).defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,qt(this._element),this._element.classList.add(_i),this._element.classList.remove(gi,mi);for(const t of this._triggerArray){const e=we.getElementFromSelector(t);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([t],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(_i),this._element.classList.add(gi),fe.trigger(this._element,fi)}),this._element,!0)}_isShown(t=this._element){return t.classList.contains(mi)}_configAfterMerge(t){return t.toggle=Boolean(t.toggle),t.parent=Ht(t.parent),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=this._getFirstLevelChildren(vi);for(const e of t){const t=we.getElementFromSelector(e);t&&this._addAriaAndCollapsedClass([e],this._isShown(t))}}_getFirstLevelChildren(t){const e=we.find(bi,this._config.parent);return we.find(t,this._config.parent).filter((t=>!e.includes(t)))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const i of t)i.classList.toggle("collapsed",!e),i.setAttribute("aria-expanded",e)}static jQueryInterface(t){const e={};return"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1),this.each((function(){const i=Ei.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}fe.on(document,pi,vi,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();for(const t of we.getMultipleElementsFromSelector(this))Ei.getOrCreateInstance(t,{toggle:!1}).toggle()})),Qt(Ei);const Ai="dropdown",Ti=".bs.dropdown",Ci=".data-api",Oi="ArrowUp",xi="ArrowDown",ki=`hide${Ti}`,Li=`hidden${Ti}`,Si=`show${Ti}`,Di=`shown${Ti}`,$i=`click${Ti}${Ci}`,Ii=`keydown${Ti}${Ci}`,Ni=`keyup${Ti}${Ci}`,Pi="show",Mi='[data-bs-toggle="dropdown"]:not(.disabled):not(:disabled)',ji=`${Mi}.${Pi}`,Fi=".dropdown-menu",Hi=Kt()?"top-end":"top-start",Bi=Kt()?"top-start":"top-end",Wi=Kt()?"bottom-end":"bottom-start",zi=Kt()?"bottom-start":"bottom-end",Ri=Kt()?"left-start":"right-start",qi=Kt()?"right-start":"left-start",Vi={autoClose:!0,boundary:"clippingParents",display:"dynamic",offset:[0,2],popperConfig:null,reference:"toggle"},Yi={autoClose:"(boolean|string)",boundary:"(string|element)",display:"string",offset:"(array|string|function)",popperConfig:"(null|object|function)",reference:"(string|element|object)"};class Ki extends ve{constructor(t,e){super(t,e),this._popper=null,this._parent=this._element.parentNode,this._menu=we.next(this._element,Fi)[0]||we.prev(this._element,Fi)[0]||we.findOne(Fi,this._parent),this._inNavbar=this._detectNavbar()}static get Default(){return Vi}static get DefaultType(){return Yi}static get NAME(){return Ai}toggle(){return this._isShown()?this.hide():this.show()}show(){if(Wt(this._element)||this._isShown())return;const t={relatedTarget:this._element};if(!fe.trigger(this._element,Si,t).defaultPrevented){if(this._createPopper(),"ontouchstart"in 
document.documentElement&&!this._parent.closest(".navbar-nav"))for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Pi),this._element.classList.add(Pi),fe.trigger(this._element,Di,t)}}hide(){if(Wt(this._element)||!this._isShown())return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){if(!fe.trigger(this._element,ki,t).defaultPrevented){if("ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._popper&&this._popper.destroy(),this._menu.classList.remove(Pi),this._element.classList.remove(Pi),this._element.setAttribute("aria-expanded","false"),_e.removeDataAttribute(this._menu,"popper"),fe.trigger(this._element,Li,t)}}_getConfig(t){if("object"==typeof(t=super._getConfig(t)).reference&&!Ft(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${Ai.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(){if(void 0===e)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let t=this._element;"parent"===this._config.reference?t=this._parent:Ft(this._config.reference)?t=Ht(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);const i=this._getPopperConfig();this._popper=Dt(t,this._menu,i)}_isShown(){return this._menu.classList.contains(Pi)}_getPlacement(){const t=this._parent;if(t.classList.contains("dropend"))return Ri;if(t.classList.contains("dropstart"))return qi;if(t.classList.contains("dropup-center"))return"top";if(t.classList.contains("dropdown-center"))return"bottom";const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?Bi:Hi:e?zi:Wi}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return(this._inNavbar||"static"===this._config.display)&&(_e.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,...Xt(this._config.popperConfig,[t])}}_selectMenuItem({key:t,target:e}){const i=we.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter((t=>Bt(t)));i.length&&Gt(i,e,t===xi,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=Ki.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(2===t.button||"keyup"===t.type&&"Tab"!==t.key)return;const e=we.find(ji);for(const i of e){const e=Ki.getInstance(i);if(!e||!1===e._config.autoClose)continue;const n=t.composedPath(),s=n.includes(e._menu);if(n.includes(e._element)||"inside"===e._config.autoClose&&!s||"outside"===e._config.autoClose&&s)continue;if(e._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;const 
o={relatedTarget:e._element};"click"===t.type&&(o.clickEvent=t),e._completeHide(o)}}static dataApiKeydownHandler(t){const e=/input|textarea/i.test(t.target.tagName),i="Escape"===t.key,n=[Oi,xi].includes(t.key);if(!n&&!i)return;if(e&&!i)return;t.preventDefault();const s=this.matches(Mi)?this:we.prev(this,Mi)[0]||we.next(this,Mi)[0]||we.findOne(Mi,t.delegateTarget.parentNode),o=Ki.getOrCreateInstance(s);if(n)return t.stopPropagation(),o.show(),void o._selectMenuItem(t);o._isShown()&&(t.stopPropagation(),o.hide(),s.focus())}}fe.on(document,Ii,Mi,Ki.dataApiKeydownHandler),fe.on(document,Ii,Fi,Ki.dataApiKeydownHandler),fe.on(document,$i,Ki.clearMenus),fe.on(document,Ni,Ki.clearMenus),fe.on(document,$i,Mi,(function(t){t.preventDefault(),Ki.getOrCreateInstance(this).toggle()})),Qt(Ki);const Qi="backdrop",Xi="show",Ui=`mousedown.bs.${Qi}`,Gi={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},Ji={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};class Zi extends be{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return Gi}static get DefaultType(){return Ji}static get NAME(){return Qi}show(t){if(!this._config.isVisible)return void Xt(t);this._append();const e=this._getElement();this._config.isAnimated&&qt(e),e.classList.add(Xi),this._emulateAnimation((()=>{Xt(t)}))}hide(t){this._config.isVisible?(this._getElement().classList.remove(Xi),this._emulateAnimation((()=>{this.dispose(),Xt(t)}))):Xt(t)}dispose(){this._isAppended&&(fe.off(this._element,Ui),this._element.remove(),this._isAppended=!1)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_configAfterMerge(t){return t.rootElement=Ht(t.rootElement),t}_append(){if(this._isAppended)return;const t=this._getElement();this._config.rootElement.append(t),fe.on(t,Ui,(()=>{Xt(this._config.clickCallback)})),this._isAppended=!0}_emulateAnimation(t){Ut(t,this._getElement(),this._config.isAnimated)}}const tn=".bs.focustrap",en=`focusin${tn}`,nn=`keydown.tab${tn}`,sn="backward",on={autofocus:!0,trapElement:null},rn={autofocus:"boolean",trapElement:"element"};class an extends be{constructor(t){super(),this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}static get Default(){return on}static get DefaultType(){return rn}static get NAME(){return"focustrap"}activate(){this._isActive||(this._config.autofocus&&this._config.trapElement.focus(),fe.off(document,tn),fe.on(document,en,(t=>this._handleFocusin(t))),fe.on(document,nn,(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,fe.off(document,tn))}_handleFocusin(t){const{trapElement:e}=this._config;if(t.target===document||t.target===e||e.contains(t.target))return;const i=we.focusableChildren(e);0===i.length?e.focus():this._lastTabNavDirection===sn?i[i.length-1].focus():i[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?sn:"forward")}}const ln=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",cn=".sticky-top",hn="padding-right",dn="margin-right";class un{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const 
t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,hn,(e=>e+t)),this._setElementAttributes(ln,hn,(e=>e+t)),this._setElementAttributes(cn,dn,(e=>e-t))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,hn),this._resetElementAttributes(ln,hn),this._resetElementAttributes(cn,dn)}isOverflowing(){return this.getWidth()>0}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t).getPropertyValue(e);t.style.setProperty(e,`${i(Number.parseFloat(s))}px`)}))}_saveInitialAttribute(t,e){const i=t.style.getPropertyValue(e);i&&_e.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=_e.getDataAttribute(t,e);null!==i?(_e.removeDataAttribute(t,e),t.style.setProperty(e,i)):t.style.removeProperty(e)}))}_applyManipulationCallback(t,e){if(Ft(t))e(t);else for(const i of we.find(t,this._element))e(i)}}const fn=".bs.modal",pn=`hide${fn}`,mn=`hidePrevented${fn}`,gn=`hidden${fn}`,_n=`show${fn}`,bn=`shown${fn}`,vn=`resize${fn}`,yn=`click.dismiss${fn}`,wn=`mousedown.dismiss${fn}`,En=`keydown.dismiss${fn}`,An=`click${fn}.data-api`,Tn="modal-open",Cn="show",On="modal-static",xn={backdrop:!0,focus:!0,keyboard:!0},kn={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class Ln extends ve{constructor(t,e){super(t,e),this._dialog=we.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new un,this._addEventListeners()}static get Default(){return xn}static get DefaultType(){return kn}static get NAME(){return"modal"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||fe.trigger(this._element,_n,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(Tn),this._adjustDialog(),this._backdrop.show((()=>this._showElement(t))))}hide(){this._isShown&&!this._isTransitioning&&(fe.trigger(this._element,pn).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove(Cn),this._queueCallback((()=>this._hideModal()),this._element,this._isAnimated())))}dispose(){fe.off(window,fn),fe.off(this._dialog,fn),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Zi({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new an({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;const 
e=we.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),qt(this._element),this._element.classList.add(Cn),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,fe.trigger(this._element,bn,{relatedTarget:t})}),this._dialog,this._isAnimated())}_addEventListeners(){fe.on(this._element,En,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():this._triggerBackdropTransition())})),fe.on(window,vn,(()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()})),fe.on(this._element,wn,(t=>{fe.one(this._element,yn,(e=>{this._element===t.target&&this._element===e.target&&("static"!==this._config.backdrop?this._config.backdrop&&this.hide():this._triggerBackdropTransition())}))}))}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(Tn),this._resetAdjustments(),this._scrollBar.reset(),fe.trigger(this._element,gn)}))}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(fe.trigger(this._element,mn).defaultPrevented)return;const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(On)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(On),this._queueCallback((()=>{this._element.classList.remove(On),this._queueCallback((()=>{this._element.style.overflowY=e}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;if(i&&!t){const t=Kt()?"paddingLeft":"paddingRight";this._element.style[t]=`${e}px`}if(!i&&t){const t=Kt()?"paddingRight":"paddingLeft";this._element.style[t]=`${e}px`}}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=Ln.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}fe.on(document,An,'[data-bs-toggle="modal"]',(function(t){const e=we.getElementFromSelector(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),fe.one(e,_n,(t=>{t.defaultPrevented||fe.one(e,gn,(()=>{Bt(this)&&this.focus()}))}));const i=we.findOne(".modal.show");i&&Ln.getInstance(i).hide(),Ln.getOrCreateInstance(e).toggle(this)})),Ee(Ln),Qt(Ln);const Sn=".bs.offcanvas",Dn=".data-api",$n=`load${Sn}${Dn}`,In="show",Nn="showing",Pn="hiding",Mn=".offcanvas.show",jn=`show${Sn}`,Fn=`shown${Sn}`,Hn=`hide${Sn}`,Bn=`hidePrevented${Sn}`,Wn=`hidden${Sn}`,zn=`resize${Sn}`,Rn=`click${Sn}${Dn}`,qn=`keydown.dismiss${Sn}`,Vn={backdrop:!0,keyboard:!0,scroll:!1},Yn={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class Kn extends ve{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return Vn}static get DefaultType(){return Yn}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||fe.trigger(this._element,jn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new 
un).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(Nn),this._queueCallback((()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add(In),this._element.classList.remove(Nn),fe.trigger(this._element,Fn,{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(fe.trigger(this._element,Hn).defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add(Pn),this._backdrop.hide(),this._queueCallback((()=>{this._element.classList.remove(In,Pn),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new un).reset(),fe.trigger(this._element,Wn)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){const t=Boolean(this._config.backdrop);return new Zi({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"!==this._config.backdrop?this.hide():fe.trigger(this._element,Bn)}:null})}_initializeFocusTrap(){return new an({trapElement:this._element})}_addEventListeners(){fe.on(this._element,qn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():fe.trigger(this._element,Bn))}))}static jQueryInterface(t){return this.each((function(){const e=Kn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}fe.on(document,Rn,'[data-bs-toggle="offcanvas"]',(function(t){const e=we.getElementFromSelector(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this))return;fe.one(e,Wn,(()=>{Bt(this)&&this.focus()}));const i=we.findOne(Mn);i&&i!==e&&Kn.getInstance(i).hide(),Kn.getOrCreateInstance(e).toggle(this)})),fe.on(window,$n,(()=>{for(const t of we.find(Mn))Kn.getOrCreateInstance(t).show()})),fe.on(window,zn,(()=>{for(const t of we.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&Kn.getOrCreateInstance(t).hide()})),Ee(Kn),Qt(Kn);const Qn={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],dd:[],div:[],dl:[],dt:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Xn=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Un=/^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i,Gn=(t,e)=>{const i=t.nodeName.toLowerCase();return e.includes(i)?!Xn.has(i)||Boolean(Un.test(t.nodeValue)):e.filter((t=>t instanceof RegExp)).some((t=>t.test(i)))},Jn={allowList:Qn,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Zn={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},ts={entry:"(string|element|function|null)",selector:"(string|element)"};class es extends be{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return Jn}static get DefaultType(){return Zn}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map((t=>this._resolvePossibleFunction(t))).filter(Boolean)}hasContent(){return this.getContent().length>0}changeContent(t){return this._checkContent(t),this._config.content={...this._config.content,...t},this}toHtml(){const t=document.createElement("div");t.innerHTML=this._maybeSanitize(this._config.template);for(const[e,i]of Object.entries(this._config.content))this._setContent(t,i,e);const e=t.children[0],i=this._resolvePossibleFunction(this._config.extraClass);return i&&e.classList.add(...i.split(" ")),e}_typeCheckConfig(t){super._typeCheckConfig(t),this._checkContent(t.content)}_checkContent(t){for(const[e,i]of Object.entries(t))super._typeCheckConfig({selector:e,entry:i},ts)}_setContent(t,e,i){const n=we.findOne(i,t);n&&((e=this._resolvePossibleFunction(e))?Ft(e)?this._putElementInTemplate(Ht(e),n):this._config.html?n.innerHTML=this._maybeSanitize(e):n.textContent=e:n.remove())}_maybeSanitize(t){return this._config.sanitize?function(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const n=(new window.DOMParser).parseFromString(t,"text/html"),s=[].concat(...n.body.querySelectorAll("*"));for(const t of s){const i=t.nodeName.toLowerCase();if(!Object.keys(e).includes(i)){t.remove();continue}const n=[].concat(...t.attributes),s=[].concat(e["*"]||[],e[i]||[]);for(const e of n)Gn(e,s)||t.removeAttribute(e.nodeName)}return n.body.innerHTML}(t,this._config.allowList,this._config.sanitizeFn):t}_resolvePossibleFunction(t){return Xt(t,[this])}_putElementInTemplate(t,e){if(this._config.html)return e.innerHTML="",void e.append(t);e.textContent=t.textContent}}const is=new Set(["sanitize","allowList","sanitizeFn"]),ns="fade",ss="show",os=".tooltip-inner",rs=".modal",as="hide.bs.modal",ls="hover",cs="focus",hs={AUTO:"auto",TOP:"top",RIGHT:Kt()?"left":"right",BOTTOM:"bottom",LEFT:Kt()?"right":"left"},ds={allowList:Qn,animation:!0,boundary:"clippingParents",container:!1,customClass:"",delay:0,fallbackPlacements:["top","right","bottom","left"],html:!1,offset:[0,6],placement:"top",popperConfig:null,sanitize:!0,sanitizeFn:null,selector:!1,template:'',title:"",trigger:"hover focus"},us={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class fs extends ve{constructor(t,i){if(void 0===e)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,i),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return ds}static get DefaultType(){return us}static get 
NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),fe.off(this._element.closest(rs),as,this._hideModalHandler),this._element.getAttribute("data-bs-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-bs-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this._isWithContent()||!this._isEnabled)return;const t=fe.trigger(this._element,this.constructor.eventName("show")),e=(zt(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(t.defaultPrevented||!e)return;this._disposePopper();const i=this._getTipElement();this._element.setAttribute("aria-describedby",i.getAttribute("id"));const{container:n}=this._config;if(this._element.ownerDocument.documentElement.contains(this.tip)||(n.append(i),fe.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(i),i.classList.add(ss),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._queueCallback((()=>{fe.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1}),this.tip,this._isAnimated())}hide(){if(this._isShown()&&!fe.trigger(this._element,this.constructor.eventName("hide")).defaultPrevented){if(this._getTipElement().classList.remove(ss),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._activeTrigger.click=!1,this._activeTrigger[cs]=!1,this._activeTrigger[ls]=!1,this._isHovered=null,this._queueCallback((()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),fe.trigger(this._element,this.constructor.eventName("hidden")))}),this.tip,this._isAnimated())}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){const e=this._getTemplateFactory(t).toHtml();if(!e)return null;e.classList.remove(ns,ss),e.classList.add(`bs-${this.constructor.NAME}-auto`);const i=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME).toString();return e.setAttribute("id",i),this._isAnimated()&&e.classList.add(ns),e}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new es({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{[os]:this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-bs-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(ns)}_isShown(){return this.tip&&this.tip.classList.contains(ss)}_createPopper(t){const 
e=Xt(this._config.placement,[this,t,this._element]),i=hs[e.toUpperCase()];return Dt(this._element,t,this._getPopperConfig(i))}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return Xt(t,[this._element])}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...e,...Xt(this._config.popperConfig,[e])}}_setListeners(){const t=this._config.trigger.split(" ");for(const e of t)if("click"===e)fe.on(this._element,this.constructor.eventName("click"),this._config.selector,(t=>{this._initializeOnDelegatedTarget(t).toggle()}));else if("manual"!==e){const t=e===ls?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),i=e===ls?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout");fe.on(this._element,t,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?cs:ls]=!0,e._enter()})),fe.on(this._element,i,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?cs:ls]=e._element.contains(t.relatedTarget),e._leave()}))}this._hideModalHandler=()=>{this._element&&this.hide()},fe.on(this._element.closest(rs),as,this._hideModalHandler)}_fixTitle(){const t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-bs-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout((()=>{this._isHovered&&this.show()}),this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout((()=>{this._isHovered||this.hide()}),this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){const e=_e.getDataAttributes(this._element);for(const t of Object.keys(e))is.has(t)&&delete e[t];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:Ht(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){const t={};for(const[e,i]of Object.entries(this._config))this.constructor.Default[e]!==i&&(t[e]=i);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(t){return this.each((function(){const e=fs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(fs);const 
ps=".popover-header",ms=".popover-body",gs={...fs.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},_s={...fs.DefaultType,content:"(null|string|element|function)"};class bs extends fs{static get Default(){return gs}static get DefaultType(){return _s}static get NAME(){return"popover"}_isWithContent(){return this._getTitle()||this._getContent()}_getContentForTemplate(){return{[ps]:this._getTitle(),[ms]:this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(t){return this.each((function(){const e=bs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(bs);const vs=".bs.scrollspy",ys=`activate${vs}`,ws=`click${vs}`,Es=`load${vs}.data-api`,As="active",Ts="[href]",Cs=".nav-link",Os=`${Cs}, .nav-item > ${Cs}, .list-group-item`,xs={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},ks={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class Ls extends ve{constructor(t,e){super(t,e),this._targetLinks=new Map,this._observableSections=new Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh()}static get Default(){return xs}static get DefaultType(){return ks}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=Ht(t.target)||document.body,t.rootMargin=t.offset?`${t.offset}px 0px -30%`:t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map((t=>Number.parseFloat(t)))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(fe.off(this._config.target,ws),fe.on(this._config.target,ws,Ts,(t=>{const e=this._observableSections.get(t.target.hash);if(e){t.preventDefault();const i=this._rootElement||window,n=e.offsetTop-this._element.offsetTop;if(i.scrollTo)return void i.scrollTo({top:n,behavior:"smooth"});i.scrollTop=n}})))}_getNewObserver(){const t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver((t=>this._observerCallback(t)),t)}_observerCallback(t){const e=t=>this._targetLinks.get(`#${t.target.id}`),i=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},n=(this._rootElement||document.documentElement).scrollTop,s=n>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=n;for(const o of t){if(!o.isIntersecting){this._activeTarget=null,this._clearActiveClass(e(o));continue}const t=o.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(s&&t){if(i(o),!n)return}else s||t||i(o)}}_initializeTargetsAndObservables(){this._targetLinks=new Map,this._observableSections=new Map;const t=we.find(Ts,this._config.target);for(const e of t){if(!e.hash||Wt(e))continue;const 
t=we.findOne(decodeURI(e.hash),this._element);Bt(t)&&(this._targetLinks.set(decodeURI(e.hash),e),this._observableSections.set(e.hash,t))}}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),this._activeTarget=t,t.classList.add(As),this._activateParents(t),fe.trigger(this._element,ys,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))we.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(As);else for(const e of we.parents(t,".nav, .list-group"))for(const t of we.prev(e,Os))t.classList.add(As)}_clearActiveClass(t){t.classList.remove(As);const e=we.find(`${Ts}.${As}`,t);for(const t of e)t.classList.remove(As)}static jQueryInterface(t){return this.each((function(){const e=Ls.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(window,Es,(()=>{for(const t of we.find('[data-bs-spy="scroll"]'))Ls.getOrCreateInstance(t)})),Qt(Ls);const Ss=".bs.tab",Ds=`hide${Ss}`,$s=`hidden${Ss}`,Is=`show${Ss}`,Ns=`shown${Ss}`,Ps=`click${Ss}`,Ms=`keydown${Ss}`,js=`load${Ss}`,Fs="ArrowLeft",Hs="ArrowRight",Bs="ArrowUp",Ws="ArrowDown",zs="Home",Rs="End",qs="active",Vs="fade",Ys="show",Ks=".dropdown-toggle",Qs=`:not(${Ks})`,Xs='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',Us=`.nav-link${Qs}, .list-group-item${Qs}, [role="tab"]${Qs}, ${Xs}`,Gs=`.${qs}[data-bs-toggle="tab"], .${qs}[data-bs-toggle="pill"], .${qs}[data-bs-toggle="list"]`;class Js extends ve{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),fe.on(this._element,Ms,(t=>this._keydown(t))))}static get NAME(){return"tab"}show(){const t=this._element;if(this._elemIsActive(t))return;const e=this._getActiveElem(),i=e?fe.trigger(e,Ds,{relatedTarget:t}):null;fe.trigger(t,Is,{relatedTarget:e}).defaultPrevented||i&&i.defaultPrevented||(this._deactivate(e,t),this._activate(t,e))}_activate(t,e){t&&(t.classList.add(qs),this._activate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),fe.trigger(t,Ns,{relatedTarget:e})):t.classList.add(Ys)}),t,t.classList.contains(Vs)))}_deactivate(t,e){t&&(t.classList.remove(qs),t.blur(),this._deactivate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),fe.trigger(t,$s,{relatedTarget:e})):t.classList.remove(Ys)}),t,t.classList.contains(Vs)))}_keydown(t){if(![Fs,Hs,Bs,Ws,zs,Rs].includes(t.key))return;t.stopPropagation(),t.preventDefault();const e=this._getChildren().filter((t=>!Wt(t)));let i;if([zs,Rs].includes(t.key))i=e[t.key===zs?0:e.length-1];else{const n=[Hs,Ws].includes(t.key);i=Gt(e,t.target,n,!0)}i&&(i.focus({preventScroll:!0}),Js.getOrCreateInstance(i).show())}_getChildren(){return we.find(Us,this._parent)}_getActiveElem(){return this._getChildren().find((t=>this._elemIsActive(t)))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const t of e)this._setInitialAttributesOnChild(t)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);const 
e=this._elemIsActive(t),i=this._getOuterElement(t);t.setAttribute("aria-selected",e),i!==t&&this._setAttributeIfNotExists(i,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){const e=we.getElementFromSelector(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id&&this._setAttributeIfNotExists(e,"aria-labelledby",`${t.id}`))}_toggleDropDown(t,e){const i=this._getOuterElement(t);if(!i.classList.contains("dropdown"))return;const n=(t,n)=>{const s=we.findOne(t,i);s&&s.classList.toggle(n,e)};n(Ks,qs),n(".dropdown-menu",Ys),i.setAttribute("aria-expanded",e)}_setAttributeIfNotExists(t,e,i){t.hasAttribute(e)||t.setAttribute(e,i)}_elemIsActive(t){return t.classList.contains(qs)}_getInnerElement(t){return t.matches(Us)?t:we.findOne(Us,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(t){return this.each((function(){const e=Js.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(document,Ps,Xs,(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this)||Js.getOrCreateInstance(this).show()})),fe.on(window,js,(()=>{for(const t of we.find(Gs))Js.getOrCreateInstance(t)})),Qt(Js);const Zs=".bs.toast",to=`mouseover${Zs}`,eo=`mouseout${Zs}`,io=`focusin${Zs}`,no=`focusout${Zs}`,so=`hide${Zs}`,oo=`hidden${Zs}`,ro=`show${Zs}`,ao=`shown${Zs}`,lo="hide",co="show",ho="showing",uo={animation:"boolean",autohide:"boolean",delay:"number"},fo={animation:!0,autohide:!0,delay:5e3};class po extends ve{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return fo}static get DefaultType(){return uo}static get NAME(){return"toast"}show(){fe.trigger(this._element,ro).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(lo),qt(this._element),this._element.classList.add(co,ho),this._queueCallback((()=>{this._element.classList.remove(ho),fe.trigger(this._element,ao),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this.isShown()&&(fe.trigger(this._element,so).defaultPrevented||(this._element.classList.add(ho),this._queueCallback((()=>{this._element.classList.add(lo),this._element.classList.remove(ho,co),fe.trigger(this._element,oo)}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(co),super.dispose()}isShown(){return this._element.classList.contains(co)}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){fe.on(this._element,to,(t=>this._onInteraction(t,!0))),fe.on(this._element,eo,(t=>this._onInteraction(t,!1))),fe.on(this._element,io,(t=>this._onInteraction(t,!0))),fe.on(this._element,no,(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return 
this.each((function(){const e=po.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}function mo(t){"loading"!=document.readyState?t():document.addEventListener("DOMContentLoaded",t)}Ee(po),Qt(po),mo((function(){[].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')).map((function(t){return new fs(t,{delay:{show:500,hide:100}})}))})),mo((function(){document.getElementById("pst-back-to-top").addEventListener("click",(function(){document.body.scrollTop=0,document.documentElement.scrollTop=0}))})),mo((function(){var t=document.getElementById("pst-back-to-top"),e=document.getElementsByClassName("bd-header")[0].getBoundingClientRect();window.addEventListener("scroll",(function(){this.oldScroll>this.scrollY&&this.scrollY>e.bottom?t.style.display="block":t.style.display="none",this.oldScroll=this.scrollY}))})),window.bootstrap=i})(); +//# sourceMappingURL=bootstrap.js.map \ No newline at end of file diff --git a/dipy.org/pull/66/_static/scripts/bootstrap.js.LICENSE.txt b/dipy.org/pull/66/_static/scripts/bootstrap.js.LICENSE.txt new file mode 100644 index 0000000..28755c2 --- /dev/null +++ b/dipy.org/pull/66/_static/scripts/bootstrap.js.LICENSE.txt @@ -0,0 +1,5 @@ +/*! + * Bootstrap v5.3.3 (https://getbootstrap.com/) + * Copyright 2011-2024 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ diff --git a/dipy.org/pull/66/_static/scripts/bootstrap.js.map b/dipy.org/pull/66/_static/scripts/bootstrap.js.map new file mode 100644 index 0000000..e9e8158 --- /dev/null +++ b/dipy.org/pull/66/_static/scripts/bootstrap.js.map @@ -0,0 +1 @@ +{"version":3,"file":"scripts/bootstrap.js","mappings":";mBACA,IAAIA,EAAsB,CCA1BA,EAAwB,CAACC,EAASC,KACjC,IAAI,IAAIC,KAAOD,EACXF,EAAoBI,EAAEF,EAAYC,KAASH,EAAoBI,EAAEH,EAASE,IAC5EE,OAAOC,eAAeL,EAASE,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,IAE1E,ECNDH,EAAwB,CAACS,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClFV,EAAyBC,IACH,oBAAXa,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeL,EAASa,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeL,EAAS,aAAc,CAAEe,OAAO,GAAO,01BCLvD,IAAI,EAAM,MACNC,EAAS,SACTC,EAAQ,QACRC,EAAO,OACPC,EAAO,OACPC,EAAiB,CAAC,EAAKJ,EAAQC,EAAOC,GACtCG,EAAQ,QACRC,EAAM,MACNC,EAAkB,kBAClBC,EAAW,WACXC,EAAS,SACTC,EAAY,YACZC,EAAmCP,EAAeQ,QAAO,SAAUC,EAAKC,GACjF,OAAOD,EAAIE,OAAO,CAACD,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAChE,GAAG,IACQ,EAA0B,GAAGS,OAAOX,EAAgB,CAACD,IAAOS,QAAO,SAAUC,EAAKC,GAC3F,OAAOD,EAAIE,OAAO,CAACD,EAAWA,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAC3E,GAAG,IAEQU,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAc,cACdC,EAAQ,QACRC,EAAa,aACbC,EAAiB,CAACT,EAAYC,EAAMC,EAAWC,EAAYC,EAAMC,EAAWC,EAAaC,EAAOC,GC9B5F,SAASE,EAAYC,GAClC,OAAOA,GAAWA,EAAQC,UAAY,IAAIC,cAAgB,IAC5D,CCFe,SAASC,EAAUC,GAChC,GAAY,MAARA,EACF,OAAOC,OAGT,GAAwB,oBAApBD,EAAKE,WAAkC,CACzC,IAAIC,EAAgBH,EAAKG,cACzB,OAAOA,GAAgBA,EAAcC,aAAwBH,MAC/D,CAEA,OAAOD,CACT,CCTA,SAASK,EAAUL,GAEjB,OAAOA,aADUD,EAAUC,GAAMM,SACIN,aAAgBM,OACvD,CAEA,SAASC,EAAcP,GAErB,OAAOA,aADUD,EAAUC,GAAMQ,aACIR,aAAgBQ,WACvD,CAEA,SAASC,EAAaT,GAEpB,MAA0B,oBAAfU,aAKJV,aADUD,EAAUC,GAAMU,YACIV,aAAgBU,WACvD,CCwDA,SACEC,KAAM,cACNC,SAAS,EACTC,MAAO,QACPC,GA5EF,SAAqBC,GACnB,IAAIC,EAAQD,EAAKC,MACjB3D,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIS,EAAQJ,EAAMK,OAAOV,IAAS,CAAC,EAC/BW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EACxCf,EAAUoB,EAAME,SAASP,GAExBJ,EAAcX,IAAaD,EAAYC,KAO5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUR,GACxC,IAAI3C,EAAQsD,EAAWX,IAET,IAAV3C,EACF4B,EAAQ4B,
gBAAgBb,GAExBf,EAAQ6B,aAAad,GAAgB,IAAV3C,EAAiB,GAAKA,EAErD,IACF,GACF,EAoDE0D,OAlDF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MACdY,EAAgB,CAClBlD,OAAQ,CACNmD,SAAUb,EAAMc,QAAQC,SACxB5D,KAAM,IACN6D,IAAK,IACLC,OAAQ,KAEVC,MAAO,CACLL,SAAU,YAEZlD,UAAW,CAAC,GASd,OAPAtB,OAAOkE,OAAOP,EAAME,SAASxC,OAAO0C,MAAOQ,EAAclD,QACzDsC,EAAMK,OAASO,EAEXZ,EAAME,SAASgB,OACjB7E,OAAOkE,OAAOP,EAAME,SAASgB,MAAMd,MAAOQ,EAAcM,OAGnD,WACL7E,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIf,EAAUoB,EAAME,SAASP,GACzBW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EAGxCS,EAFkB/D,OAAO4D,KAAKD,EAAMK,OAAOzD,eAAe+C,GAAQK,EAAMK,OAAOV,GAAQiB,EAAcjB,IAE7E9B,QAAO,SAAUuC,EAAOe,GAElD,OADAf,EAAMe,GAAY,GACXf,CACT,GAAG,CAAC,GAECb,EAAcX,IAAaD,EAAYC,KAI5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUiB,GACxCxC,EAAQ4B,gBAAgBY,EAC1B,IACF,GACF,CACF,EASEC,SAAU,CAAC,kBCjFE,SAASC,EAAiBvD,GACvC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCHO,IAAI,EAAMC,KAAKC,IACX,EAAMD,KAAKE,IACXC,EAAQH,KAAKG,MCFT,SAASC,IACtB,IAAIC,EAASC,UAAUC,cAEvB,OAAc,MAAVF,GAAkBA,EAAOG,QAAUC,MAAMC,QAAQL,EAAOG,QACnDH,EAAOG,OAAOG,KAAI,SAAUC,GACjC,OAAOA,EAAKC,MAAQ,IAAMD,EAAKE,OACjC,IAAGC,KAAK,KAGHT,UAAUU,SACnB,CCTe,SAASC,IACtB,OAAQ,iCAAiCC,KAAKd,IAChD,CCCe,SAASe,EAAsB/D,EAASgE,EAAcC,QAC9C,IAAjBD,IACFA,GAAe,QAGO,IAApBC,IACFA,GAAkB,GAGpB,IAAIC,EAAalE,EAAQ+D,wBACrBI,EAAS,EACTC,EAAS,EAETJ,GAAgBrD,EAAcX,KAChCmE,EAASnE,EAAQqE,YAAc,GAAItB,EAAMmB,EAAWI,OAAStE,EAAQqE,aAAmB,EACxFD,EAASpE,EAAQuE,aAAe,GAAIxB,EAAMmB,EAAWM,QAAUxE,EAAQuE,cAAoB,GAG7F,IACIE,GADOhE,EAAUT,GAAWG,EAAUH,GAAWK,QAC3BoE,eAEtBC,GAAoBb,KAAsBI,EAC1CU,GAAKT,EAAW3F,MAAQmG,GAAoBD,EAAiBA,EAAeG,WAAa,IAAMT,EAC/FU,GAAKX,EAAW9B,KAAOsC,GAAoBD,EAAiBA,EAAeK,UAAY,IAAMV,EAC7FE,EAAQJ,EAAWI,MAAQH,EAC3BK,EAASN,EAAWM,OAASJ,EACjC,MAAO,CACLE,MAAOA,EACPE,OAAQA,EACRpC,IAAKyC,EACLvG,MAAOqG,EAAIL,EACXjG,OAAQwG,EAAIL,EACZjG,KAAMoG,EACNA,EAAGA,EACHE,EAAGA,EAEP,CCrCe,SAASE,EAAc/E,GACpC,IAAIkE,EAAaH,EAAsB/D,GAGnCsE,EAAQtE,EAAQqE,YAChBG,EAASxE,EAAQuE,aAUrB,OARI3B,KAAKoC,IAAId,EAAWI,MAAQA,IAAU,IACxCA,EAAQJ,EAAWI,OAGjB1B,KAAKoC,IAAId,EAAWM,OAASA,IAAW,IAC1CA,EAASN,EAAWM,QAGf,CACLG,EAAG3E,EAAQ4E,WACXC,EAAG7E,EAAQ8E,UACXR,MAAOA,EACPE,OAAQA,EAEZ,CCvBe,SAASS,EAASC,EAAQC,GACvC,IAAIC,EAAWD,EAAME,aAAeF,EAAME,cAE1C,GAAIH,EAAOD,SAASE,GAClB,OAAO,EAEJ,GAAIC,GAAYvE,EAAauE,GAAW,CACzC,IAAIE,EAAOH,EAEX,EAAG,CACD,GAAIG,GAAQJ,EAAOK,WAAWD,GAC5B,OAAO,EAITA,EAAOA,EAAKE,YAAcF,EAAKG,IACjC,OAASH,EACX,CAGF,OAAO,CACT,CCrBe,SAAS,EAAiBtF,GACvC,OAAOG,EAAUH,GAAS0F,iBAAiB1F,EAC7C,CCFe,SAAS2F,EAAe3F,GACrC,MAAO,CAAC,QAAS,KAAM,MAAM4F,QAAQ7F,EAAYC,KAAa,CAChE,CCFe,SAAS6F,EAAmB7F,GAEzC,QAASS,EAAUT,GAAWA,EAAQO,cACtCP,EAAQ8F,WAAazF,OAAOyF,UAAUC,eACxC,CCFe,SAASC,EAAchG,GACpC,MAA6B,SAAzBD,EAAYC,GACPA,EAMPA,EAAQiG,cACRjG,EAAQwF,aACR3E,EAAab,GAAWA,EAAQyF,KAAO,OAEvCI,EAAmB7F,EAGvB,CCVA,SAASkG,EAAoBlG,GAC3B,OAAKW,EAAcX,IACoB,UAAvC,EAAiBA,GAASiC,SAInBjC,EAAQmG,aAHN,IAIX,CAwCe,SAASC,EAAgBpG,GAItC,IAHA,IAAIK,EAASF,EAAUH,GACnBmG,EAAeD,EAAoBlG,GAEhCmG,GAAgBR,EAAeQ,IAA6D,WAA5C,EAAiBA,GAAclE,UACpFkE,EAAeD,EAAoBC,GAGrC,OAAIA,IAA+C,SAA9BpG,EAAYoG,IAA0D,SAA9BpG,EAAYoG,IAAwE,WAA5C,EAAiBA,GAAclE,UAC3H5B,EAGF8F,GAhDT,SAA4BnG,GAC1B,IAAIqG,EAAY,WAAWvC,KAAKd,KAGhC,GAFW,WAAWc,KAAKd,MAEfrC,EAAcX,IAII,UAFX,EAAiBA,GAEnBiC,SACb,OAAO,KAIX,IAAIqE,EAAcN,EAAchG,GAMhC,IAJIa,EAAayF,KACfA,EAAcA,EAAYb,MAGrB9E,EAAc2F,IAAgB,CAAC,OAAQ,QAAQV,QAAQ7F,EAAYuG,IAAgB,GAAG,CAC3F,IAAIC,EAAM,EAAiBD,GAI3B,GAAsB,SAAlBC,EAAIC,WAA4C,SAApBD,EAAIE,aAA0C,UAAhBF,EAAIG,UAAiF,IAA1D,CAAC,YAAa,eAAed,QAAQW,EAAII,aAAsBN,GAAgC,WAAnBE,EAAII,YAA2BN,GAAaE,EAAIK,QAAyB,SAAfL,EAAIK,OACjO,OAAON,EAEPA,EAAcA,EAAYd,UAE9B,CAEA,OAAO,IACT,CAgByBqB,CAAmB7G,IAAYK,CACxD,CCpEe,SAASyG,EAAyB3H,GAC/
C,MAAO,CAAC,MAAO,UAAUyG,QAAQzG,IAAc,EAAI,IAAM,GAC3D,CCDO,SAAS4H,EAAOjE,EAAK1E,EAAOyE,GACjC,OAAO,EAAQC,EAAK,EAAQ1E,EAAOyE,GACrC,CCFe,SAASmE,EAAmBC,GACzC,OAAOxJ,OAAOkE,OAAO,CAAC,ECDf,CACLS,IAAK,EACL9D,MAAO,EACPD,OAAQ,EACRE,KAAM,GDHuC0I,EACjD,CEHe,SAASC,EAAgB9I,EAAOiD,GAC7C,OAAOA,EAAKpC,QAAO,SAAUkI,EAAS5J,GAEpC,OADA4J,EAAQ5J,GAAOa,EACR+I,CACT,GAAG,CAAC,EACN,CC4EA,SACEpG,KAAM,QACNC,SAAS,EACTC,MAAO,OACPC,GApEF,SAAeC,GACb,IAAIiG,EAEAhG,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZmB,EAAUf,EAAKe,QACfmF,EAAejG,EAAME,SAASgB,MAC9BgF,EAAgBlG,EAAMmG,cAAcD,cACpCE,EAAgB9E,EAAiBtB,EAAMjC,WACvCsI,EAAOX,EAAyBU,GAEhCE,EADa,CAACnJ,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAClC,SAAW,QAElC,GAAKH,GAAiBC,EAAtB,CAIA,IAAIL,EAxBgB,SAAyBU,EAASvG,GAItD,OAAO4F,EAAsC,iBAH7CW,EAA6B,mBAAZA,EAAyBA,EAAQlK,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CAC/EzI,UAAWiC,EAAMjC,aACbwI,GACkDA,EAAUT,EAAgBS,EAASlJ,GAC7F,CAmBsBoJ,CAAgB3F,EAAQyF,QAASvG,GACjD0G,EAAY/C,EAAcsC,GAC1BU,EAAmB,MAATN,EAAe,EAAMlJ,EAC/ByJ,EAAmB,MAATP,EAAepJ,EAASC,EAClC2J,EAAU7G,EAAMwG,MAAM7I,UAAU2I,GAAOtG,EAAMwG,MAAM7I,UAAU0I,GAAQH,EAAcG,GAAQrG,EAAMwG,MAAM9I,OAAO4I,GAC9GQ,EAAYZ,EAAcG,GAAQrG,EAAMwG,MAAM7I,UAAU0I,GACxDU,EAAoB/B,EAAgBiB,GACpCe,EAAaD,EAA6B,MAATV,EAAeU,EAAkBE,cAAgB,EAAIF,EAAkBG,aAAe,EAAI,EAC3HC,EAAoBN,EAAU,EAAIC,EAAY,EAG9CpF,EAAMmE,EAAcc,GACpBlF,EAAMuF,EAAaN,EAAUJ,GAAOT,EAAce,GAClDQ,EAASJ,EAAa,EAAIN,EAAUJ,GAAO,EAAIa,EAC/CE,EAAS1B,EAAOjE,EAAK0F,EAAQ3F,GAE7B6F,EAAWjB,EACfrG,EAAMmG,cAAcxG,KAASqG,EAAwB,CAAC,GAAyBsB,GAAYD,EAAQrB,EAAsBuB,aAAeF,EAASD,EAAQpB,EAnBzJ,CAoBF,EAkCEtF,OAhCF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MAEdwH,EADU7G,EAAMG,QACWlC,QAC3BqH,OAAoC,IAArBuB,EAA8B,sBAAwBA,EAErD,MAAhBvB,IAKwB,iBAAjBA,IACTA,EAAejG,EAAME,SAASxC,OAAO+J,cAAcxB,MAOhDpC,EAAS7D,EAAME,SAASxC,OAAQuI,KAIrCjG,EAAME,SAASgB,MAAQ+E,EACzB,EASE5E,SAAU,CAAC,iBACXqG,iBAAkB,CAAC,oBCxFN,SAASC,EAAa5J,GACnC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCOA,IAAIqG,GAAa,CACf5G,IAAK,OACL9D,MAAO,OACPD,OAAQ,OACRE,KAAM,QAeD,SAAS0K,GAAYlH,GAC1B,IAAImH,EAEApK,EAASiD,EAAMjD,OACfqK,EAAapH,EAAMoH,WACnBhK,EAAY4C,EAAM5C,UAClBiK,EAAYrH,EAAMqH,UAClBC,EAAUtH,EAAMsH,QAChBpH,EAAWF,EAAME,SACjBqH,EAAkBvH,EAAMuH,gBACxBC,EAAWxH,EAAMwH,SACjBC,EAAezH,EAAMyH,aACrBC,EAAU1H,EAAM0H,QAChBC,EAAaL,EAAQ1E,EACrBA,OAAmB,IAAf+E,EAAwB,EAAIA,EAChCC,EAAaN,EAAQxE,EACrBA,OAAmB,IAAf8E,EAAwB,EAAIA,EAEhCC,EAAgC,mBAAjBJ,EAA8BA,EAAa,CAC5D7E,EAAGA,EACHE,IACG,CACHF,EAAGA,EACHE,GAGFF,EAAIiF,EAAMjF,EACVE,EAAI+E,EAAM/E,EACV,IAAIgF,EAAOR,EAAQrL,eAAe,KAC9B8L,EAAOT,EAAQrL,eAAe,KAC9B+L,EAAQxL,EACRyL,EAAQ,EACRC,EAAM5J,OAEV,GAAIkJ,EAAU,CACZ,IAAIpD,EAAeC,EAAgBtH,GAC/BoL,EAAa,eACbC,EAAY,cAEZhE,IAAiBhG,EAAUrB,IAGmB,WAA5C,EAFJqH,EAAeN,EAAmB/G,IAECmD,UAAsC,aAAbA,IAC1DiI,EAAa,eACbC,EAAY,gBAOZhL,IAAc,IAAQA,IAAcZ,GAAQY,IAAcb,IAAU8K,IAAczK,KACpFqL,EAAQ3L,EAGRwG,IAFc4E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeD,OACzF2B,EAAa+D,IACEf,EAAW3E,OAC1BK,GAAKyE,EAAkB,GAAK,GAG1BnK,IAAcZ,IAASY,IAAc,GAAOA,IAAcd,GAAW+K,IAAczK,KACrFoL,EAAQzL,EAGRqG,IAFc8E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeH,MACzF6B,EAAagE,IACEhB,EAAW7E,MAC1BK,GAAK2E,EAAkB,GAAK,EAEhC,CAEA,IAgBMc,EAhBFC,EAAe5M,OAAOkE,OAAO,CAC/BM,SAAUA,GACTsH,GAAYP,IAEXsB,GAAyB,IAAjBd,EAlFd,SAA2BrI,EAAM8I,GAC/B,IAAItF,EAAIxD,EAAKwD,EACTE,EAAI1D,EAAK0D,EACT0F,EAAMN,EAAIO,kBAAoB,EAClC,MAAO,CACL7F,EAAG5B,EAAM4B,EAAI4F,GAAOA,GAAO,EAC3B1F,EAAG9B,EAAM8B,EAAI0F,GAAOA,GAAO,EAE/B,CA0EsCE,CAAkB,CACpD9F,EAAGA,EACHE,GACC1E,EAAUrB,IAAW,CACtB6F,EAAGA,EACHE,GAMF,OAHAF,EAAI2F,EAAM3F,EACVE,EAAIyF,EAAMzF,EAENyE,EAGK7L,OAAOkE,OAAO,CAAC,EAAG0I,IAAeD,EAAiB,CAAC,GAAkBJ,GAASF,EAAO,IAAM,GAAIM,EAAeL,GAASF,EAAO,IAAM,GAAIO,EAAe5D,WAAayD,EAAIO,kBAAoB,IAAM,EAAI,aAAe7F,EAAI,OAASE,EAAI,M
AAQ,eAAiBF,EAAI,OAASE,EAAI,SAAUuF,IAG5R3M,OAAOkE,OAAO,CAAC,EAAG0I,IAAenB,EAAkB,CAAC,GAAmBc,GAASF,EAAOjF,EAAI,KAAO,GAAIqE,EAAgBa,GAASF,EAAOlF,EAAI,KAAO,GAAIuE,EAAgB1C,UAAY,GAAI0C,GAC9L,CA4CA,UACEnI,KAAM,gBACNC,SAAS,EACTC,MAAO,cACPC,GA9CF,SAAuBwJ,GACrB,IAAItJ,EAAQsJ,EAAMtJ,MACdc,EAAUwI,EAAMxI,QAChByI,EAAwBzI,EAAQoH,gBAChCA,OAA4C,IAA1BqB,GAA0CA,EAC5DC,EAAoB1I,EAAQqH,SAC5BA,OAAiC,IAAtBqB,GAAsCA,EACjDC,EAAwB3I,EAAQsH,aAChCA,OAAyC,IAA1BqB,GAA0CA,EACzDR,EAAe,CACjBlL,UAAWuD,EAAiBtB,EAAMjC,WAClCiK,UAAWL,EAAa3H,EAAMjC,WAC9BL,OAAQsC,EAAME,SAASxC,OACvBqK,WAAY/H,EAAMwG,MAAM9I,OACxBwK,gBAAiBA,EACjBG,QAAoC,UAA3BrI,EAAMc,QAAQC,UAGgB,MAArCf,EAAMmG,cAAcD,gBACtBlG,EAAMK,OAAO3C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMK,OAAO3C,OAAQmK,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACvGhB,QAASjI,EAAMmG,cAAcD,cAC7BrF,SAAUb,EAAMc,QAAQC,SACxBoH,SAAUA,EACVC,aAAcA,OAIe,MAA7BpI,EAAMmG,cAAcjF,QACtBlB,EAAMK,OAAOa,MAAQ7E,OAAOkE,OAAO,CAAC,EAAGP,EAAMK,OAAOa,MAAO2G,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACrGhB,QAASjI,EAAMmG,cAAcjF,MAC7BL,SAAU,WACVsH,UAAU,EACVC,aAAcA,OAIlBpI,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,wBAAyBsC,EAAMjC,WAEnC,EAQE2L,KAAM,CAAC,GCrKT,IAAIC,GAAU,CACZA,SAAS,GAsCX,UACEhK,KAAM,iBACNC,SAAS,EACTC,MAAO,QACPC,GAAI,WAAe,EACnBY,OAxCF,SAAgBX,GACd,IAAIC,EAAQD,EAAKC,MACb4J,EAAW7J,EAAK6J,SAChB9I,EAAUf,EAAKe,QACf+I,EAAkB/I,EAAQgJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAkBjJ,EAAQkJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7C9K,EAASF,EAAUiB,EAAME,SAASxC,QAClCuM,EAAgB,GAAGjM,OAAOgC,EAAMiK,cAActM,UAAWqC,EAAMiK,cAAcvM,QAYjF,OAVIoM,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaC,iBAAiB,SAAUP,EAASQ,OAAQT,GAC3D,IAGEK,GACF/K,EAAOkL,iBAAiB,SAAUP,EAASQ,OAAQT,IAG9C,WACDG,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaG,oBAAoB,SAAUT,EAASQ,OAAQT,GAC9D,IAGEK,GACF/K,EAAOoL,oBAAoB,SAAUT,EAASQ,OAAQT,GAE1D,CACF,EASED,KAAM,CAAC,GC/CT,IAAIY,GAAO,CACTnN,KAAM,QACND,MAAO,OACPD,OAAQ,MACR+D,IAAK,UAEQ,SAASuJ,GAAqBxM,GAC3C,OAAOA,EAAUyM,QAAQ,0BAA0B,SAAUC,GAC3D,OAAOH,GAAKG,EACd,GACF,CCVA,IAAI,GAAO,CACTnN,MAAO,MACPC,IAAK,SAEQ,SAASmN,GAA8B3M,GACpD,OAAOA,EAAUyM,QAAQ,cAAc,SAAUC,GAC/C,OAAO,GAAKA,EACd,GACF,CCPe,SAASE,GAAgB3L,GACtC,IAAI6J,EAAM9J,EAAUC,GAGpB,MAAO,CACL4L,WAHe/B,EAAIgC,YAInBC,UAHcjC,EAAIkC,YAKtB,CCNe,SAASC,GAAoBpM,GAQ1C,OAAO+D,EAAsB8B,EAAmB7F,IAAUzB,KAAOwN,GAAgB/L,GAASgM,UAC5F,CCXe,SAASK,GAAerM,GAErC,IAAIsM,EAAoB,EAAiBtM,GACrCuM,EAAWD,EAAkBC,SAC7BC,EAAYF,EAAkBE,UAC9BC,EAAYH,EAAkBG,UAElC,MAAO,6BAA6B3I,KAAKyI,EAAWE,EAAYD,EAClE,CCLe,SAASE,GAAgBtM,GACtC,MAAI,CAAC,OAAQ,OAAQ,aAAawF,QAAQ7F,EAAYK,KAAU,EAEvDA,EAAKG,cAAcoM,KAGxBhM,EAAcP,IAASiM,GAAejM,GACjCA,EAGFsM,GAAgB1G,EAAc5F,GACvC,CCJe,SAASwM,GAAkB5M,EAAS6M,GACjD,IAAIC,OAES,IAATD,IACFA,EAAO,IAGT,IAAIvB,EAAeoB,GAAgB1M,GAC/B+M,EAASzB,KAAqE,OAAlDwB,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,MACpH1C,EAAM9J,EAAUmL,GAChB0B,EAASD,EAAS,CAAC9C,GAAK7K,OAAO6K,EAAIxF,gBAAkB,GAAI4H,GAAef,GAAgBA,EAAe,IAAMA,EAC7G2B,EAAcJ,EAAKzN,OAAO4N,GAC9B,OAAOD,EAASE,EAChBA,EAAY7N,OAAOwN,GAAkB5G,EAAcgH,IACrD,CCzBe,SAASE,GAAiBC,GACvC,OAAO1P,OAAOkE,OAAO,CAAC,EAAGwL,EAAM,CAC7B5O,KAAM4O,EAAKxI,EACXvC,IAAK+K,EAAKtI,EACVvG,MAAO6O,EAAKxI,EAAIwI,EAAK7I,MACrBjG,OAAQ8O,EAAKtI,EAAIsI,EAAK3I,QAE1B,CCqBA,SAAS4I,GAA2BpN,EAASqN,EAAgBlL,GAC3D,OAAOkL,IAAmBxO,EAAWqO,GCzBxB,SAAyBlN,EAASmC,GAC/C,IAAI8H,EAAM9J,EAAUH,GAChBsN,EAAOzH,EAAmB7F,GAC1ByE,EAAiBwF,EAAIxF,eACrBH,EAAQgJ,EAAKhF,YACb9D,EAAS8I,EAAKjF,aACd1D,EAAI,EACJE,EAAI,EAER,GAAIJ,EAAgB,CAClBH,EAAQG,EAAeH,MACvBE,EAASC,EAAeD,OACxB,IAAI+I,EAAiB1J,KAEjB0J,IAAmBA,GAA+B,UAAbpL,KACvCwC,EAAIF,EAAeG,WACnBC,EAAIJ,EAAeK,UAEvB,CAEA,MAAO,CACLR,MAAOA,EACPE,OAAQA,EACRG,EAAGA,EAAIyH,GAAoBpM,GAC3B6E,EAAGA,EAEP,CDDwD2I,CAAgBxN,EAASmC,IAAa1
B,EAAU4M,GAdxG,SAAoCrN,EAASmC,GAC3C,IAAIgL,EAAOpJ,EAAsB/D,GAAS,EAAoB,UAAbmC,GASjD,OARAgL,EAAK/K,IAAM+K,EAAK/K,IAAMpC,EAAQyN,UAC9BN,EAAK5O,KAAO4O,EAAK5O,KAAOyB,EAAQ0N,WAChCP,EAAK9O,OAAS8O,EAAK/K,IAAMpC,EAAQqI,aACjC8E,EAAK7O,MAAQ6O,EAAK5O,KAAOyB,EAAQsI,YACjC6E,EAAK7I,MAAQtE,EAAQsI,YACrB6E,EAAK3I,OAASxE,EAAQqI,aACtB8E,EAAKxI,EAAIwI,EAAK5O,KACd4O,EAAKtI,EAAIsI,EAAK/K,IACP+K,CACT,CAG0HQ,CAA2BN,EAAgBlL,GAAY+K,GEtBlK,SAAyBlN,GACtC,IAAI8M,EAEAQ,EAAOzH,EAAmB7F,GAC1B4N,EAAY7B,GAAgB/L,GAC5B2M,EAA0D,OAAlDG,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,KAChGrI,EAAQ,EAAIgJ,EAAKO,YAAaP,EAAKhF,YAAaqE,EAAOA,EAAKkB,YAAc,EAAGlB,EAAOA,EAAKrE,YAAc,GACvG9D,EAAS,EAAI8I,EAAKQ,aAAcR,EAAKjF,aAAcsE,EAAOA,EAAKmB,aAAe,EAAGnB,EAAOA,EAAKtE,aAAe,GAC5G1D,GAAKiJ,EAAU5B,WAAaI,GAAoBpM,GAChD6E,GAAK+I,EAAU1B,UAMnB,MAJiD,QAA7C,EAAiBS,GAAQW,GAAMS,YACjCpJ,GAAK,EAAI2I,EAAKhF,YAAaqE,EAAOA,EAAKrE,YAAc,GAAKhE,GAGrD,CACLA,MAAOA,EACPE,OAAQA,EACRG,EAAGA,EACHE,EAAGA,EAEP,CFCkMmJ,CAAgBnI,EAAmB7F,IACrO,CG1Be,SAASiO,GAAe9M,GACrC,IAOIkI,EAPAtK,EAAYoC,EAAKpC,UACjBiB,EAAUmB,EAAKnB,QACfb,EAAYgC,EAAKhC,UACjBqI,EAAgBrI,EAAYuD,EAAiBvD,GAAa,KAC1DiK,EAAYjK,EAAY4J,EAAa5J,GAAa,KAClD+O,EAAUnP,EAAU4F,EAAI5F,EAAUuF,MAAQ,EAAItE,EAAQsE,MAAQ,EAC9D6J,EAAUpP,EAAU8F,EAAI9F,EAAUyF,OAAS,EAAIxE,EAAQwE,OAAS,EAGpE,OAAQgD,GACN,KAAK,EACH6B,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI7E,EAAQwE,QAE3B,MAEF,KAAKnG,EACHgL,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI9F,EAAUyF,QAE7B,MAEF,KAAKlG,EACH+K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI5F,EAAUuF,MAC3BO,EAAGsJ,GAEL,MAEF,KAAK5P,EACH8K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI3E,EAAQsE,MACzBO,EAAGsJ,GAEL,MAEF,QACE9E,EAAU,CACR1E,EAAG5F,EAAU4F,EACbE,EAAG9F,EAAU8F,GAInB,IAAIuJ,EAAW5G,EAAgBV,EAAyBU,GAAiB,KAEzE,GAAgB,MAAZ4G,EAAkB,CACpB,IAAI1G,EAAmB,MAAb0G,EAAmB,SAAW,QAExC,OAAQhF,GACN,KAAK1K,EACH2K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAC7E,MAEF,KAAK/I,EACH0K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAKnF,CAEA,OAAO2B,CACT,CC3De,SAASgF,GAAejN,EAAOc,QAC5B,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACXqM,EAAqBD,EAASnP,UAC9BA,OAAmC,IAAvBoP,EAAgCnN,EAAMjC,UAAYoP,EAC9DC,EAAoBF,EAASnM,SAC7BA,OAAiC,IAAtBqM,EAA+BpN,EAAMe,SAAWqM,EAC3DC,EAAoBH,EAASI,SAC7BA,OAAiC,IAAtBD,EAA+B7P,EAAkB6P,EAC5DE,EAAwBL,EAASM,aACjCA,OAAyC,IAA1BD,EAAmC9P,EAAW8P,EAC7DE,EAAwBP,EAASQ,eACjCA,OAA2C,IAA1BD,EAAmC/P,EAAS+P,EAC7DE,EAAuBT,EAASU,YAChCA,OAAuC,IAAzBD,GAA0CA,EACxDE,EAAmBX,EAAS3G,QAC5BA,OAA+B,IAArBsH,EAA8B,EAAIA,EAC5ChI,EAAgBD,EAAsC,iBAAZW,EAAuBA,EAAUT,EAAgBS,EAASlJ,IACpGyQ,EAAaJ,IAAmBhQ,EAASC,EAAYD,EACrDqK,EAAa/H,EAAMwG,MAAM9I,OACzBkB,EAAUoB,EAAME,SAAS0N,EAAcE,EAAaJ,GACpDK,EJkBS,SAAyBnP,EAAS0O,EAAUE,EAAczM,GACvE,IAAIiN,EAAmC,oBAAbV,EAlB5B,SAA4B1O,GAC1B,IAAIpB,EAAkBgO,GAAkB5G,EAAchG,IAElDqP,EADoB,CAAC,WAAY,SAASzJ,QAAQ,EAAiB5F,GAASiC,WAAa,GACnDtB,EAAcX,GAAWoG,EAAgBpG,GAAWA,EAE9F,OAAKS,EAAU4O,GAKRzQ,EAAgBgI,QAAO,SAAUyG,GACtC,OAAO5M,EAAU4M,IAAmBpI,EAASoI,EAAgBgC,IAAmD,SAAhCtP,EAAYsN,EAC9F,IANS,EAOX,CAK6DiC,CAAmBtP,GAAW,GAAGZ,OAAOsP,GAC/F9P,EAAkB,GAAGQ,OAAOgQ,EAAqB,CAACR,IAClDW,EAAsB3Q,EAAgB,GACtC4Q,EAAe5Q,EAAgBK,QAAO,SAAUwQ,EAASpC,GAC3D,IAAIF,EAAOC,GAA2BpN,EAASqN,EAAgBlL,GAK/D,OAJAsN,EAAQrN,IAAM,EAAI+K,EAAK/K,IAAKqN,EAAQrN,KACpCqN,EAAQnR,MAAQ,EAAI6O,EAAK7O,MAAOmR,EAAQnR,OACxCmR,EAAQpR,OAAS,EAAI8O,EAAK9O,OAAQoR,EAAQpR,QAC1CoR,EAAQlR,KAAO,EAAI4O,EAAK5O,KAAMkR,EAAQlR,MAC/BkR,CACT,GAAGrC,GAA2BpN,EAASuP,EAAqBpN,IAK5D,OAJAqN,EAAalL,MAAQkL,EAAalR,MAAQkR,EAAajR,KACvDiR,EAAahL,OAASgL,EAAanR,OAASmR,EAAapN,IACzDoN,EAAa7K,EAAI6K,EAAajR,KAC9BiR,EAAa3K,EAAI2K,EAAapN,IACvBoN,CACT,CInC2BE,CAAgBjP,EAAUT,GAAWA,EAAUA,EAAQ2P,gBAAkB9J,EAAmBzE,EAAME,SAASxC,QAAS4P,EAAUE,EAAczM,G
ACjKyN,EAAsB7L,EAAsB3C,EAAME,SAASvC,WAC3DuI,EAAgB2G,GAAe,CACjClP,UAAW6Q,EACX5P,QAASmJ,EACThH,SAAU,WACVhD,UAAWA,IAET0Q,EAAmB3C,GAAiBzP,OAAOkE,OAAO,CAAC,EAAGwH,EAAY7B,IAClEwI,EAAoBhB,IAAmBhQ,EAAS+Q,EAAmBD,EAGnEG,EAAkB,CACpB3N,IAAK+M,EAAmB/M,IAAM0N,EAAkB1N,IAAM6E,EAAc7E,IACpE/D,OAAQyR,EAAkBzR,OAAS8Q,EAAmB9Q,OAAS4I,EAAc5I,OAC7EE,KAAM4Q,EAAmB5Q,KAAOuR,EAAkBvR,KAAO0I,EAAc1I,KACvED,MAAOwR,EAAkBxR,MAAQ6Q,EAAmB7Q,MAAQ2I,EAAc3I,OAExE0R,EAAa5O,EAAMmG,cAAckB,OAErC,GAAIqG,IAAmBhQ,GAAUkR,EAAY,CAC3C,IAAIvH,EAASuH,EAAW7Q,GACxB1B,OAAO4D,KAAK0O,GAAiBxO,SAAQ,SAAUhE,GAC7C,IAAI0S,EAAW,CAAC3R,EAAOD,GAAQuH,QAAQrI,IAAQ,EAAI,GAAK,EACpDkK,EAAO,CAAC,EAAKpJ,GAAQuH,QAAQrI,IAAQ,EAAI,IAAM,IACnDwS,EAAgBxS,IAAQkL,EAAOhB,GAAQwI,CACzC,GACF,CAEA,OAAOF,CACT,CCyEA,UACEhP,KAAM,OACNC,SAAS,EACTC,MAAO,OACPC,GA5HF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KAEhB,IAAIK,EAAMmG,cAAcxG,GAAMmP,MAA9B,CAoCA,IAhCA,IAAIC,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAqCA,EACpDG,EAA8BtO,EAAQuO,mBACtC9I,EAAUzF,EAAQyF,QAClB+G,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtB0B,EAAwBxO,EAAQyO,eAChCA,OAA2C,IAA1BD,GAA0CA,EAC3DE,EAAwB1O,EAAQ0O,sBAChCC,EAAqBzP,EAAMc,QAAQ/C,UACnCqI,EAAgB9E,EAAiBmO,GAEjCJ,EAAqBD,IADHhJ,IAAkBqJ,GACqCF,EAjC/E,SAAuCxR,GACrC,GAAIuD,EAAiBvD,KAAeX,EAClC,MAAO,GAGT,IAAIsS,EAAoBnF,GAAqBxM,GAC7C,MAAO,CAAC2M,GAA8B3M,GAAY2R,EAAmBhF,GAA8BgF,GACrG,CA0B6IC,CAA8BF,GAA3E,CAAClF,GAAqBkF,KAChHG,EAAa,CAACH,GAAoBzR,OAAOqR,GAAoBxR,QAAO,SAAUC,EAAKC,GACrF,OAAOD,EAAIE,OAAOsD,EAAiBvD,KAAeX,ECvCvC,SAA8B4C,EAAOc,QAClC,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACX/C,EAAYmP,EAASnP,UACrBuP,EAAWJ,EAASI,SACpBE,EAAeN,EAASM,aACxBjH,EAAU2G,EAAS3G,QACnBgJ,EAAiBrC,EAASqC,eAC1BM,EAAwB3C,EAASsC,sBACjCA,OAAkD,IAA1BK,EAAmC,EAAgBA,EAC3E7H,EAAYL,EAAa5J,GACzB6R,EAAa5H,EAAYuH,EAAiB3R,EAAsBA,EAAoB4H,QAAO,SAAUzH,GACvG,OAAO4J,EAAa5J,KAAeiK,CACrC,IAAK3K,EACDyS,EAAoBF,EAAWpK,QAAO,SAAUzH,GAClD,OAAOyR,EAAsBhL,QAAQzG,IAAc,CACrD,IAEiC,IAA7B+R,EAAkBC,SACpBD,EAAoBF,GAItB,IAAII,EAAYF,EAAkBjS,QAAO,SAAUC,EAAKC,GAOtD,OANAD,EAAIC,GAAakP,GAAejN,EAAO,CACrCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,IACRjF,EAAiBvD,IACbD,CACT,GAAG,CAAC,GACJ,OAAOzB,OAAO4D,KAAK+P,GAAWC,MAAK,SAAUC,EAAGC,GAC9C,OAAOH,EAAUE,GAAKF,EAAUG,EAClC,GACF,CDC6DC,CAAqBpQ,EAAO,CACnFjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTgJ,eAAgBA,EAChBC,sBAAuBA,IACpBzR,EACP,GAAG,IACCsS,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzB4S,EAAY,IAAIC,IAChBC,GAAqB,EACrBC,EAAwBb,EAAW,GAE9Bc,EAAI,EAAGA,EAAId,EAAWG,OAAQW,IAAK,CAC1C,IAAI3S,EAAY6R,EAAWc,GAEvBC,EAAiBrP,EAAiBvD,GAElC6S,EAAmBjJ,EAAa5J,KAAeT,EAC/CuT,EAAa,CAAC,EAAK5T,GAAQuH,QAAQmM,IAAmB,EACtDrK,EAAMuK,EAAa,QAAU,SAC7B1F,EAAW8B,GAAejN,EAAO,CACnCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdI,YAAaA,EACbrH,QAASA,IAEPuK,EAAoBD,EAAaD,EAAmB1T,EAAQC,EAAOyT,EAAmB3T,EAAS,EAE/FoT,EAAc/J,GAAOyB,EAAWzB,KAClCwK,EAAoBvG,GAAqBuG,IAG3C,IAAIC,EAAmBxG,GAAqBuG,GACxCE,EAAS,GAUb,GARIhC,GACFgC,EAAOC,KAAK9F,EAASwF,IAAmB,GAGtCxB,GACF6B,EAAOC,KAAK9F,EAAS2F,IAAsB,EAAG3F,EAAS4F,IAAqB,GAG1EC,EAAOE,OAAM,SAAUC,GACzB,OAAOA,CACT,IAAI,CACFV,EAAwB1S,EACxByS,GAAqB,EACrB,KACF,CAEAF,EAAUc,IAAIrT,EAAWiT,EAC3B,CAEA,GAAIR,EAqBF,IAnBA,IAEIa,EAAQ,SAAeC,GACzB,IAAIC,EAAmB3B,EAAW4B,MAAK,SAAUzT,GAC/C,IAAIiT,EAASV,EAAU9T,IAAIuB,GAE3B,GAAIiT,EACF,OAAOA,EAAOS,MAAM,EAAGH,GAAIJ,OAAM,SAAUC,GACzC,OAAOA,CACT,GAEJ,IAEA,GAAII,EAEF,OADAd,EAAwBc,EACjB,OAEX,EAESD,EAnBY/B,EAAiB,EAAI,EAmBZ+B,EAAK,GAGpB,UAFFD,EAAMC,GADmBA,KAOpCtR,EAAMjC,YAAc0S,IACtBzQ,EAAMmG,cAAcxG,GAAMmP,OAAQ,EAClC9O,EAAMjC,UAAY0S,EAClBzQ,EAAM0R,OAAQ,EA5GhB,CA8GF,EAQEhK,iBAAkB,CAAC,UAC
nBgC,KAAM,CACJoF,OAAO,IE7IX,SAAS6C,GAAexG,EAAUY,EAAM6F,GAQtC,YAPyB,IAArBA,IACFA,EAAmB,CACjBrO,EAAG,EACHE,EAAG,IAIA,CACLzC,IAAKmK,EAASnK,IAAM+K,EAAK3I,OAASwO,EAAiBnO,EACnDvG,MAAOiO,EAASjO,MAAQ6O,EAAK7I,MAAQ0O,EAAiBrO,EACtDtG,OAAQkO,EAASlO,OAAS8O,EAAK3I,OAASwO,EAAiBnO,EACzDtG,KAAMgO,EAAShO,KAAO4O,EAAK7I,MAAQ0O,EAAiBrO,EAExD,CAEA,SAASsO,GAAsB1G,GAC7B,MAAO,CAAC,EAAKjO,EAAOD,EAAQE,GAAM2U,MAAK,SAAUC,GAC/C,OAAO5G,EAAS4G,IAAS,CAC3B,GACF,CA+BA,UACEpS,KAAM,OACNC,SAAS,EACTC,MAAO,OACP6H,iBAAkB,CAAC,mBACnB5H,GAlCF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZ0Q,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBkU,EAAmB5R,EAAMmG,cAAc6L,gBACvCC,EAAoBhF,GAAejN,EAAO,CAC5C0N,eAAgB,cAEdwE,EAAoBjF,GAAejN,EAAO,CAC5C4N,aAAa,IAEXuE,EAA2BR,GAAeM,EAAmB5B,GAC7D+B,EAAsBT,GAAeO,EAAmBnK,EAAY6J,GACpES,EAAoBR,GAAsBM,GAC1CG,EAAmBT,GAAsBO,GAC7CpS,EAAMmG,cAAcxG,GAAQ,CAC1BwS,yBAA0BA,EAC1BC,oBAAqBA,EACrBC,kBAAmBA,EACnBC,iBAAkBA,GAEpBtS,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,+BAAgC2U,EAChC,sBAAuBC,GAE3B,GCJA,IACE3S,KAAM,SACNC,SAAS,EACTC,MAAO,OACPwB,SAAU,CAAC,iBACXvB,GA5BF,SAAgBa,GACd,IAAIX,EAAQW,EAAMX,MACdc,EAAUH,EAAMG,QAChBnB,EAAOgB,EAAMhB,KACb4S,EAAkBzR,EAAQuG,OAC1BA,OAA6B,IAApBkL,EAA6B,CAAC,EAAG,GAAKA,EAC/C7I,EAAO,EAAW7L,QAAO,SAAUC,EAAKC,GAE1C,OADAD,EAAIC,GA5BD,SAAiCA,EAAWyI,EAAOa,GACxD,IAAIjB,EAAgB9E,EAAiBvD,GACjCyU,EAAiB,CAACrV,EAAM,GAAKqH,QAAQ4B,IAAkB,GAAK,EAAI,EAEhErG,EAAyB,mBAAXsH,EAAwBA,EAAOhL,OAAOkE,OAAO,CAAC,EAAGiG,EAAO,CACxEzI,UAAWA,KACPsJ,EACFoL,EAAW1S,EAAK,GAChB2S,EAAW3S,EAAK,GAIpB,OAFA0S,EAAWA,GAAY,EACvBC,GAAYA,GAAY,GAAKF,EACtB,CAACrV,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAAI,CACjD7C,EAAGmP,EACHjP,EAAGgP,GACD,CACFlP,EAAGkP,EACHhP,EAAGiP,EAEP,CASqBC,CAAwB5U,EAAWiC,EAAMwG,MAAOa,GAC1DvJ,CACT,GAAG,CAAC,GACA8U,EAAwBlJ,EAAK1J,EAAMjC,WACnCwF,EAAIqP,EAAsBrP,EAC1BE,EAAImP,EAAsBnP,EAEW,MAArCzD,EAAMmG,cAAcD,gBACtBlG,EAAMmG,cAAcD,cAAc3C,GAAKA,EACvCvD,EAAMmG,cAAcD,cAAczC,GAAKA,GAGzCzD,EAAMmG,cAAcxG,GAAQ+J,CAC9B,GC1BA,IACE/J,KAAM,gBACNC,SAAS,EACTC,MAAO,OACPC,GApBF,SAAuBC,GACrB,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KAKhBK,EAAMmG,cAAcxG,GAAQkN,GAAe,CACzClP,UAAWqC,EAAMwG,MAAM7I,UACvBiB,QAASoB,EAAMwG,MAAM9I,OACrBqD,SAAU,WACVhD,UAAWiC,EAAMjC,WAErB,EAQE2L,KAAM,CAAC,GCgHT,IACE/J,KAAM,kBACNC,SAAS,EACTC,MAAO,OACPC,GA/HF,SAAyBC,GACvB,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KACZoP,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAsCA,EACrD3B,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtBrH,EAAUzF,EAAQyF,QAClBsM,EAAkB/R,EAAQgS,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAwBjS,EAAQkS,aAChCA,OAAyC,IAA1BD,EAAmC,EAAIA,EACtD5H,EAAW8B,GAAejN,EAAO,CACnCsN,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTqH,YAAaA,IAEXxH,EAAgB9E,EAAiBtB,EAAMjC,WACvCiK,EAAYL,EAAa3H,EAAMjC,WAC/BkV,GAAmBjL,EACnBgF,EAAWtH,EAAyBU,GACpC8I,ECrCY,MDqCSlC,ECrCH,IAAM,IDsCxB9G,EAAgBlG,EAAMmG,cAAcD,cACpCmK,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBwV,EAA4C,mBAAjBF,EAA8BA,EAAa3W,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CACvGzI,UAAWiC,EAAMjC,aACbiV,EACFG,EAA2D,iBAAtBD,EAAiC,CACxElG,SAAUkG,EACVhE,QAASgE,GACP7W,OAAOkE,OAAO,CAChByM,SAAU,EACVkC,QAAS,GACRgE,GACCE,EAAsBpT,EAAMmG,cAAckB,OAASrH,EAAMmG,cAAckB,OAAOrH,EAAMjC,WAAa,KACjG2L,EAAO,CACTnG,EAAG,EACHE,EAAG,GAGL,GAAKyC,EAAL,CAIA,GAAI8I,EAAe,CACjB,IAAIqE,EAEAC,EAAwB,MAAbtG,EAAmB,EAAM7P,EACpCoW,EAAuB,MAAbvG,EAAmB/P,EAASC,EACtCoJ,EAAmB,MAAb0G,EAAmB,SAAW,QACpC3F,EAASnB,EAAc8G,GACvBtL,EAAM2F,EAAS8D,EAASmI,GACxB7R,EAAM4F,EAAS8D,EAASoI,GACxBC,EAAWV,GAAU/K,EAAWzB,GAAO,EAAI,EAC3CmN,EAASzL,IAAc1K,EAAQ+S,EAAc/J,GAAOyB,EAAWzB,GAC/DoN,EAAS1L,IAAc1K,GAASyK,EAAWzB,
IAAQ+J,EAAc/J,GAGjEL,EAAejG,EAAME,SAASgB,MAC9BwF,EAAYoM,GAAU7M,EAAetC,EAAcsC,GAAgB,CACrE/C,MAAO,EACPE,OAAQ,GAENuQ,GAAqB3T,EAAMmG,cAAc,oBAAsBnG,EAAMmG,cAAc,oBAAoBI,QxBhFtG,CACLvF,IAAK,EACL9D,MAAO,EACPD,OAAQ,EACRE,KAAM,GwB6EFyW,GAAkBD,GAAmBL,GACrCO,GAAkBF,GAAmBJ,GAMrCO,GAAWnO,EAAO,EAAG0K,EAAc/J,GAAMI,EAAUJ,IACnDyN,GAAYd,EAAkB5C,EAAc/J,GAAO,EAAIkN,EAAWM,GAAWF,GAAkBT,EAA4BnG,SAAWyG,EAASK,GAAWF,GAAkBT,EAA4BnG,SACxMgH,GAAYf,GAAmB5C,EAAc/J,GAAO,EAAIkN,EAAWM,GAAWD,GAAkBV,EAA4BnG,SAAW0G,EAASI,GAAWD,GAAkBV,EAA4BnG,SACzMjG,GAAoB/G,EAAME,SAASgB,OAAS8D,EAAgBhF,EAAME,SAASgB,OAC3E+S,GAAelN,GAAiC,MAAbiG,EAAmBjG,GAAkBsF,WAAa,EAAItF,GAAkBuF,YAAc,EAAI,EAC7H4H,GAAwH,OAAjGb,EAA+C,MAAvBD,OAA8B,EAASA,EAAoBpG,IAAqBqG,EAAwB,EAEvJc,GAAY9M,EAAS2M,GAAYE,GACjCE,GAAkBzO,EAAOmN,EAAS,EAAQpR,EAF9B2F,EAAS0M,GAAYG,GAAsBD,IAEKvS,EAAK2F,EAAQyL,EAAS,EAAQrR,EAAK0S,IAAa1S,GAChHyE,EAAc8G,GAAYoH,GAC1B1K,EAAKsD,GAAYoH,GAAkB/M,CACrC,CAEA,GAAI8H,EAAc,CAChB,IAAIkF,GAEAC,GAAyB,MAAbtH,EAAmB,EAAM7P,EAErCoX,GAAwB,MAAbvH,EAAmB/P,EAASC,EAEvCsX,GAAUtO,EAAcgJ,GAExBuF,GAAmB,MAAZvF,EAAkB,SAAW,QAEpCwF,GAAOF,GAAUrJ,EAASmJ,IAE1BK,GAAOH,GAAUrJ,EAASoJ,IAE1BK,IAAuD,IAAxC,CAAC,EAAKzX,GAAMqH,QAAQ4B,GAEnCyO,GAAyH,OAAjGR,GAAgD,MAAvBjB,OAA8B,EAASA,EAAoBlE,IAAoBmF,GAAyB,EAEzJS,GAAaF,GAAeF,GAAOF,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAEzI6F,GAAaH,GAAeJ,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAAUyF,GAE5IK,GAAmBlC,GAAU8B,G1BzH9B,SAAwBlT,EAAK1E,EAAOyE,GACzC,IAAIwT,EAAItP,EAAOjE,EAAK1E,EAAOyE,GAC3B,OAAOwT,EAAIxT,EAAMA,EAAMwT,CACzB,C0BsHoDC,CAAeJ,GAAYN,GAASO,IAAcpP,EAAOmN,EAASgC,GAAaJ,GAAMF,GAAS1B,EAASiC,GAAaJ,IAEpKzO,EAAcgJ,GAAW8F,GACzBtL,EAAKwF,GAAW8F,GAAmBR,EACrC,CAEAxU,EAAMmG,cAAcxG,GAAQ+J,CAvE5B,CAwEF,EAQEhC,iBAAkB,CAAC,WE1HN,SAASyN,GAAiBC,EAAyBrQ,EAAcsD,QAC9D,IAAZA,IACFA,GAAU,GAGZ,ICnBoCrJ,ECJOJ,EFuBvCyW,EAA0B9V,EAAcwF,GACxCuQ,EAAuB/V,EAAcwF,IAf3C,SAAyBnG,GACvB,IAAImN,EAAOnN,EAAQ+D,wBACfI,EAASpB,EAAMoK,EAAK7I,OAAStE,EAAQqE,aAAe,EACpDD,EAASrB,EAAMoK,EAAK3I,QAAUxE,EAAQuE,cAAgB,EAC1D,OAAkB,IAAXJ,GAA2B,IAAXC,CACzB,CAU4DuS,CAAgBxQ,GACtEJ,EAAkBF,EAAmBM,GACrCgH,EAAOpJ,EAAsByS,EAAyBE,EAAsBjN,GAC5EyB,EAAS,CACXc,WAAY,EACZE,UAAW,GAET7C,EAAU,CACZ1E,EAAG,EACHE,EAAG,GAkBL,OAfI4R,IAA4BA,IAA4BhN,MACxB,SAA9B1J,EAAYoG,IAChBkG,GAAetG,MACbmF,GCnCgC9K,EDmCT+F,KClCdhG,EAAUC,IAAUO,EAAcP,GCJxC,CACL4L,YAFyChM,EDQbI,GCNR4L,WACpBE,UAAWlM,EAAQkM,WDGZH,GAAgB3L,IDoCnBO,EAAcwF,KAChBkD,EAAUtF,EAAsBoC,GAAc,IACtCxB,GAAKwB,EAAauH,WAC1BrE,EAAQxE,GAAKsB,EAAasH,WACjB1H,IACTsD,EAAQ1E,EAAIyH,GAAoBrG,KAI7B,CACLpB,EAAGwI,EAAK5O,KAAO2M,EAAOc,WAAa3C,EAAQ1E,EAC3CE,EAAGsI,EAAK/K,IAAM8I,EAAOgB,UAAY7C,EAAQxE,EACzCP,MAAO6I,EAAK7I,MACZE,OAAQ2I,EAAK3I,OAEjB,CGvDA,SAASoS,GAAMC,GACb,IAAItT,EAAM,IAAIoO,IACVmF,EAAU,IAAIC,IACdC,EAAS,GAKb,SAAS3F,EAAK4F,GACZH,EAAQI,IAAID,EAASlW,MACN,GAAG3B,OAAO6X,EAASxU,UAAY,GAAIwU,EAASnO,kBAAoB,IACtEvH,SAAQ,SAAU4V,GACzB,IAAKL,EAAQM,IAAID,GAAM,CACrB,IAAIE,EAAc9T,EAAI3F,IAAIuZ,GAEtBE,GACFhG,EAAKgG,EAET,CACF,IACAL,EAAO3E,KAAK4E,EACd,CAQA,OAzBAJ,EAAUtV,SAAQ,SAAU0V,GAC1B1T,EAAIiP,IAAIyE,EAASlW,KAAMkW,EACzB,IAiBAJ,EAAUtV,SAAQ,SAAU0V,GACrBH,EAAQM,IAAIH,EAASlW,OAExBsQ,EAAK4F,EAET,IACOD,CACT,CCvBA,IAAIM,GAAkB,CACpBnY,UAAW,SACX0X,UAAW,GACX1U,SAAU,YAGZ,SAASoV,KACP,IAAK,IAAI1B,EAAO2B,UAAUrG,OAAQsG,EAAO,IAAIpU,MAAMwS,GAAO6B,EAAO,EAAGA,EAAO7B,EAAM6B,IAC/ED,EAAKC,GAAQF,UAAUE,GAGzB,OAAQD,EAAKvE,MAAK,SAAUlT,GAC1B,QAASA,GAAoD,mBAAlCA,EAAQ+D,sBACrC,GACF,CAEO,SAAS4T,GAAgBC,QACL,IAArBA,IACFA,EAAmB,CAAC,GAGtB,IAAIC,EAAoBD,EACpBE,EAAwBD,EAAkBE,iBAC1CA,OAA6C,IAA1BD,EAAmC,GAAKA,EAC3DE,EAAyBH,EAAkBI,eAC3CA,OAA4C,IAA3BD,EAAoCV,GAAkBU,EAC3E,OAAO,SAAsBjZ,EAAWD,EAAQoD,QAC9B,IAAZA,IACFA,
EAAU+V,GAGZ,ICxC6B/W,EAC3BgX,EDuCE9W,EAAQ,CACVjC,UAAW,SACXgZ,iBAAkB,GAClBjW,QAASzE,OAAOkE,OAAO,CAAC,EAAG2V,GAAiBW,GAC5C1Q,cAAe,CAAC,EAChBjG,SAAU,CACRvC,UAAWA,EACXD,OAAQA,GAEV4C,WAAY,CAAC,EACbD,OAAQ,CAAC,GAEP2W,EAAmB,GACnBC,GAAc,EACdrN,EAAW,CACb5J,MAAOA,EACPkX,WAAY,SAAoBC,GAC9B,IAAIrW,EAAsC,mBAArBqW,EAAkCA,EAAiBnX,EAAMc,SAAWqW,EACzFC,IACApX,EAAMc,QAAUzE,OAAOkE,OAAO,CAAC,EAAGsW,EAAgB7W,EAAMc,QAASA,GACjEd,EAAMiK,cAAgB,CACpBtM,UAAW0B,EAAU1B,GAAa6N,GAAkB7N,GAAaA,EAAU4Q,eAAiB/C,GAAkB7N,EAAU4Q,gBAAkB,GAC1I7Q,OAAQ8N,GAAkB9N,IAI5B,IElE4B+X,EAC9B4B,EFiEMN,EDhCG,SAAwBtB,GAErC,IAAIsB,EAAmBvB,GAAMC,GAE7B,OAAO/W,EAAeb,QAAO,SAAUC,EAAK+B,GAC1C,OAAO/B,EAAIE,OAAO+Y,EAAiBvR,QAAO,SAAUqQ,GAClD,OAAOA,EAAShW,QAAUA,CAC5B,IACF,GAAG,GACL,CCuB+ByX,EElEK7B,EFkEsB,GAAGzX,OAAO2Y,EAAkB3W,EAAMc,QAAQ2U,WEjE9F4B,EAAS5B,EAAU5X,QAAO,SAAUwZ,EAAQE,GAC9C,IAAIC,EAAWH,EAAOE,EAAQ5X,MAK9B,OAJA0X,EAAOE,EAAQ5X,MAAQ6X,EAAWnb,OAAOkE,OAAO,CAAC,EAAGiX,EAAUD,EAAS,CACrEzW,QAASzE,OAAOkE,OAAO,CAAC,EAAGiX,EAAS1W,QAASyW,EAAQzW,SACrD4I,KAAMrN,OAAOkE,OAAO,CAAC,EAAGiX,EAAS9N,KAAM6N,EAAQ7N,QAC5C6N,EACEF,CACT,GAAG,CAAC,GAEGhb,OAAO4D,KAAKoX,GAAQlV,KAAI,SAAUhG,GACvC,OAAOkb,EAAOlb,EAChB,MF4DM,OAJA6D,EAAM+W,iBAAmBA,EAAiBvR,QAAO,SAAUiS,GACzD,OAAOA,EAAE7X,OACX,IA+FFI,EAAM+W,iBAAiB5W,SAAQ,SAAUJ,GACvC,IAAIJ,EAAOI,EAAKJ,KACZ+X,EAAe3X,EAAKe,QACpBA,OAA2B,IAAjB4W,EAA0B,CAAC,EAAIA,EACzChX,EAASX,EAAKW,OAElB,GAAsB,mBAAXA,EAAuB,CAChC,IAAIiX,EAAYjX,EAAO,CACrBV,MAAOA,EACPL,KAAMA,EACNiK,SAAUA,EACV9I,QAASA,IAKXkW,EAAiB/F,KAAK0G,GAFT,WAAmB,EAGlC,CACF,IA/GS/N,EAASQ,QAClB,EAMAwN,YAAa,WACX,IAAIX,EAAJ,CAIA,IAAIY,EAAkB7X,EAAME,SACxBvC,EAAYka,EAAgBla,UAC5BD,EAASma,EAAgBna,OAG7B,GAAKyY,GAAiBxY,EAAWD,GAAjC,CAKAsC,EAAMwG,MAAQ,CACZ7I,UAAWwX,GAAiBxX,EAAWqH,EAAgBtH,GAAoC,UAA3BsC,EAAMc,QAAQC,UAC9ErD,OAAQiG,EAAcjG,IAOxBsC,EAAM0R,OAAQ,EACd1R,EAAMjC,UAAYiC,EAAMc,QAAQ/C,UAKhCiC,EAAM+W,iBAAiB5W,SAAQ,SAAU0V,GACvC,OAAO7V,EAAMmG,cAAc0P,EAASlW,MAAQtD,OAAOkE,OAAO,CAAC,EAAGsV,EAASnM,KACzE,IAEA,IAAK,IAAIoO,EAAQ,EAAGA,EAAQ9X,EAAM+W,iBAAiBhH,OAAQ+H,IACzD,IAAoB,IAAhB9X,EAAM0R,MAAV,CAMA,IAAIqG,EAAwB/X,EAAM+W,iBAAiBe,GAC/ChY,EAAKiY,EAAsBjY,GAC3BkY,EAAyBD,EAAsBjX,QAC/CoM,OAAsC,IAA3B8K,EAAoC,CAAC,EAAIA,EACpDrY,EAAOoY,EAAsBpY,KAEf,mBAAPG,IACTE,EAAQF,EAAG,CACTE,MAAOA,EACPc,QAASoM,EACTvN,KAAMA,EACNiK,SAAUA,KACN5J,EAdR,MAHEA,EAAM0R,OAAQ,EACdoG,GAAS,CAzBb,CATA,CAqDF,EAGA1N,QC1I2BtK,ED0IV,WACf,OAAO,IAAImY,SAAQ,SAAUC,GAC3BtO,EAASgO,cACTM,EAAQlY,EACV,GACF,EC7IG,WAUL,OATK8W,IACHA,EAAU,IAAImB,SAAQ,SAAUC,GAC9BD,QAAQC,UAAUC,MAAK,WACrBrB,OAAUsB,EACVF,EAAQpY,IACV,GACF,KAGKgX,CACT,GDmIIuB,QAAS,WACPjB,IACAH,GAAc,CAChB,GAGF,IAAKd,GAAiBxY,EAAWD,GAC/B,OAAOkM,EAmCT,SAASwN,IACPJ,EAAiB7W,SAAQ,SAAUL,GACjC,OAAOA,GACT,IACAkX,EAAmB,EACrB,CAEA,OAvCApN,EAASsN,WAAWpW,GAASqX,MAAK,SAAUnY,IACrCiX,GAAenW,EAAQwX,eAC1BxX,EAAQwX,cAActY,EAE1B,IAmCO4J,CACT,CACF,CACO,IAAI2O,GAA4BhC,KGzLnC,GAA4BA,GAAgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,EAAa,GAAQ,GAAM,GAAiB,EAAO,MCJrH,GAA4BjC,GAAgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,KCatE,MAAMC,GAAa,IAAIlI,IACjBmI,GAAO,CACX,GAAAtH,CAAIxS,EAASzC,EAAKyN,GACX6O,GAAWzC,IAAIpX,IAClB6Z,GAAWrH,IAAIxS,EAAS,IAAI2R,KAE9B,MAAMoI,EAAcF,GAAWjc,IAAIoC,GAI9B+Z,EAAY3C,IAAI7Z,IAA6B,IAArBwc,EAAYC,KAKzCD,EAAYvH,IAAIjV,EAAKyN,GAHnBiP,QAAQC,MAAM,+EAA+E7W,MAAM8W,KAAKJ,EAAY1Y,QAAQ,MAIhI,EACAzD,IAAG,CAACoC,EAASzC,IACPsc,GAAWzC,IAAIpX,IACV6Z,GAAWjc,IAAIoC,GAASpC,IAAIL,IAE9B,KAET,MAAA6c,CAAOpa,EAASzC,GACd,IAAKsc,GAAWzC,IAAIpX,GAClB,OAEF,MAAM+Z,EAAcF,GAAWjc,IAAIoC,GACnC+Z,EAAYM,OAAO9c,GAGM,IAArBwc,EAAYC,MACdH,GAAWQ,OAAOra,EAEtB,GAYIsa,GAAiB,gBAOjBC,GAAgBC,IAChBA,GAAYna,OAAOoa,KAAOpa,OAAOoa,IAAIC,SAEvCF,EAAWA,EAAS5O,QAAQ,iBAAiB,CA
AC+O,EAAOC,IAAO,IAAIH,IAAIC,OAAOE,QAEtEJ,GA4CHK,GAAuB7a,IAC3BA,EAAQ8a,cAAc,IAAIC,MAAMT,IAAgB,EAE5C,GAAYU,MACXA,GAA4B,iBAAXA,UAGO,IAAlBA,EAAOC,SAChBD,EAASA,EAAO,SAEgB,IAApBA,EAAOE,UAEjBC,GAAaH,GAEb,GAAUA,GACLA,EAAOC,OAASD,EAAO,GAAKA,EAEf,iBAAXA,GAAuBA,EAAO7J,OAAS,EACzCrL,SAAS+C,cAAc0R,GAAcS,IAEvC,KAEHI,GAAYpb,IAChB,IAAK,GAAUA,IAAgD,IAApCA,EAAQqb,iBAAiBlK,OAClD,OAAO,EAET,MAAMmK,EAAgF,YAA7D5V,iBAAiB1F,GAASub,iBAAiB,cAE9DC,EAAgBxb,EAAQyb,QAAQ,uBACtC,IAAKD,EACH,OAAOF,EAET,GAAIE,IAAkBxb,EAAS,CAC7B,MAAM0b,EAAU1b,EAAQyb,QAAQ,WAChC,GAAIC,GAAWA,EAAQlW,aAAegW,EACpC,OAAO,EAET,GAAgB,OAAZE,EACF,OAAO,CAEX,CACA,OAAOJ,CAAgB,EAEnBK,GAAa3b,IACZA,GAAWA,EAAQkb,WAAaU,KAAKC,gBAGtC7b,EAAQ8b,UAAU7W,SAAS,mBAGC,IAArBjF,EAAQ+b,SACV/b,EAAQ+b,SAEV/b,EAAQgc,aAAa,aAAoD,UAArChc,EAAQic,aAAa,aAE5DC,GAAiBlc,IACrB,IAAK8F,SAASC,gBAAgBoW,aAC5B,OAAO,KAIT,GAAmC,mBAAxBnc,EAAQqF,YAA4B,CAC7C,MAAM+W,EAAOpc,EAAQqF,cACrB,OAAO+W,aAAgBtb,WAAasb,EAAO,IAC7C,CACA,OAAIpc,aAAmBc,WACdd,EAIJA,EAAQwF,WAGN0W,GAAelc,EAAQwF,YAFrB,IAEgC,EAErC6W,GAAO,OAUPC,GAAStc,IACbA,EAAQuE,YAAY,EAEhBgY,GAAY,IACZlc,OAAOmc,SAAW1W,SAAS6G,KAAKqP,aAAa,qBACxC3b,OAAOmc,OAET,KAEHC,GAA4B,GAgB5BC,GAAQ,IAAuC,QAAjC5W,SAASC,gBAAgB4W,IACvCC,GAAqBC,IAhBAC,QAiBN,KACjB,MAAMC,EAAIR,KAEV,GAAIQ,EAAG,CACL,MAAMhc,EAAO8b,EAAOG,KACdC,EAAqBF,EAAE7b,GAAGH,GAChCgc,EAAE7b,GAAGH,GAAQ8b,EAAOK,gBACpBH,EAAE7b,GAAGH,GAAMoc,YAAcN,EACzBE,EAAE7b,GAAGH,GAAMqc,WAAa,KACtBL,EAAE7b,GAAGH,GAAQkc,EACNJ,EAAOK,gBAElB,GA5B0B,YAAxBpX,SAASuX,YAENZ,GAA0BtL,QAC7BrL,SAASyF,iBAAiB,oBAAoB,KAC5C,IAAK,MAAMuR,KAAYL,GACrBK,GACF,IAGJL,GAA0BpK,KAAKyK,IAE/BA,GAkBA,EAEEQ,GAAU,CAACC,EAAkB9F,EAAO,GAAI+F,EAAeD,IACxB,mBAArBA,EAAkCA,KAAoB9F,GAAQ+F,EAExEC,GAAyB,CAACX,EAAUY,EAAmBC,GAAoB,KAC/E,IAAKA,EAEH,YADAL,GAAQR,GAGV,MACMc,EA/JiC5d,KACvC,IAAKA,EACH,OAAO,EAIT,IAAI,mBACF6d,EAAkB,gBAClBC,GACEzd,OAAOqF,iBAAiB1F,GAC5B,MAAM+d,EAA0BC,OAAOC,WAAWJ,GAC5CK,EAAuBF,OAAOC,WAAWH,GAG/C,OAAKC,GAA4BG,GAKjCL,EAAqBA,EAAmBlb,MAAM,KAAK,GACnDmb,EAAkBA,EAAgBnb,MAAM,KAAK,GAtDf,KAuDtBqb,OAAOC,WAAWJ,GAAsBG,OAAOC,WAAWH,KANzD,CAMoG,EA0IpFK,CAAiCT,GADlC,EAExB,IAAIU,GAAS,EACb,MAAMC,EAAU,EACdrR,aAEIA,IAAW0Q,IAGfU,GAAS,EACTV,EAAkBjS,oBAAoB6O,GAAgB+D,GACtDf,GAAQR,GAAS,EAEnBY,EAAkBnS,iBAAiB+O,GAAgB+D,GACnDC,YAAW,KACJF,GACHvD,GAAqB6C,EACvB,GACCE,EAAiB,EAYhBW,GAAuB,CAAC1R,EAAM2R,EAAeC,EAAeC,KAChE,MAAMC,EAAa9R,EAAKsE,OACxB,IAAI+H,EAAQrM,EAAKjH,QAAQ4Y,GAIzB,OAAe,IAAXtF,GACMuF,GAAiBC,EAAiB7R,EAAK8R,EAAa,GAAK9R,EAAK,IAExEqM,GAASuF,EAAgB,GAAK,EAC1BC,IACFxF,GAASA,EAAQyF,GAAcA,GAE1B9R,EAAKjK,KAAKC,IAAI,EAAGD,KAAKE,IAAIoW,EAAOyF,EAAa,KAAI,EAerDC,GAAiB,qBACjBC,GAAiB,OACjBC,GAAgB,SAChBC,GAAgB,CAAC,EACvB,IAAIC,GAAW,EACf,MAAMC,GAAe,CACnBC,WAAY,YACZC,WAAY,YAERC,GAAe,IAAIrI,IAAI,CAAC,QAAS,WAAY,UAAW,YAAa,cAAe,aAAc,iBAAkB,YAAa,WAAY,YAAa,cAAe,YAAa,UAAW,WAAY,QAAS,oBAAqB,aAAc,YAAa,WAAY,cAAe,cAAe,cAAe,YAAa,eAAgB,gBAAiB,eAAgB,gBAAiB,aAAc,QAAS,OAAQ,SAAU,QAAS,SAAU,SAAU,UAAW,WAAY,OAAQ,SAAU,eAAgB,SAAU,OAAQ,mBAAoB,mBAAoB,QAAS,QAAS,WAM/lB,SAASsI,GAAarf,EAASsf,GAC7B,OAAOA,GAAO,GAAGA,MAAQN,QAAgBhf,EAAQgf,UAAYA,IAC/D,CACA,SAASO,GAAiBvf,GACxB,MAAMsf,EAAMD,GAAarf,GAGzB,OAFAA,EAAQgf,SAAWM,EACnBP,GAAcO,GAAOP,GAAcO,IAAQ,CAAC,EACrCP,GAAcO,EACvB,CAiCA,SAASE,GAAYC,EAAQC,EAAUC,EAAqB,MAC1D,OAAOliB,OAAOmiB,OAAOH,GAAQ7M,MAAKiN,GAASA,EAAMH,WAAaA,GAAYG,EAAMF,qBAAuBA,GACzG,CACA,SAASG,GAAoBC,EAAmB1B,EAAS2B,GACvD,MAAMC,EAAiC,iBAAZ5B,EAErBqB,EAAWO,EAAcD,EAAqB3B,GAAW2B,EAC/D,IAAIE,EAAYC,GAAaJ,GAI7B,OAHKX,GAAahI,IAAI8I,KACpBA,EAAYH,GAEP,CAACE,EAAaP,EAAUQ,EACjC,CACA,SAASE,GAAWpgB,EAAS+f,EAAmB1B,EAAS2B,EAAoBK,GAC3E,GAAiC,iBAAtBN,IAAmC/f,EAC5C,OAEF,IAAKigB,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GAIzF,GAAID,KAAqBd,GA
Ac,CACrC,MAAMqB,EAAepf,GACZ,SAAU2e,GACf,IAAKA,EAAMU,eAAiBV,EAAMU,gBAAkBV,EAAMW,iBAAmBX,EAAMW,eAAevb,SAAS4a,EAAMU,eAC/G,OAAOrf,EAAGjD,KAAKwiB,KAAMZ,EAEzB,EAEFH,EAAWY,EAAaZ,EAC1B,CACA,MAAMD,EAASF,GAAiBvf,GAC1B0gB,EAAWjB,EAAOS,KAAeT,EAAOS,GAAa,CAAC,GACtDS,EAAmBnB,GAAYkB,EAAUhB,EAAUO,EAAc5B,EAAU,MACjF,GAAIsC,EAEF,YADAA,EAAiBN,OAASM,EAAiBN,QAAUA,GAGvD,MAAMf,EAAMD,GAAaK,EAAUK,EAAkBnU,QAAQgT,GAAgB,KACvE1d,EAAK+e,EA5Db,SAAoCjgB,EAASwa,EAAUtZ,GACrD,OAAO,SAASmd,EAAQwB,GACtB,MAAMe,EAAc5gB,EAAQ6gB,iBAAiBrG,GAC7C,IAAK,IAAI,OACPxN,GACE6S,EAAO7S,GAAUA,IAAWyT,KAAMzT,EAASA,EAAOxH,WACpD,IAAK,MAAMsb,KAAcF,EACvB,GAAIE,IAAe9T,EASnB,OANA+T,GAAWlB,EAAO,CAChBW,eAAgBxT,IAEdqR,EAAQgC,QACVW,GAAaC,IAAIjhB,EAAS6f,EAAMqB,KAAM1G,EAAUtZ,GAE3CA,EAAGigB,MAAMnU,EAAQ,CAAC6S,GAG/B,CACF,CAwC2BuB,CAA2BphB,EAASqe,EAASqB,GAvExE,SAA0B1f,EAASkB,GACjC,OAAO,SAASmd,EAAQwB,GAOtB,OANAkB,GAAWlB,EAAO,CAChBW,eAAgBxgB,IAEdqe,EAAQgC,QACVW,GAAaC,IAAIjhB,EAAS6f,EAAMqB,KAAMhgB,GAEjCA,EAAGigB,MAAMnhB,EAAS,CAAC6f,GAC5B,CACF,CA6DoFwB,CAAiBrhB,EAAS0f,GAC5Gxe,EAAGye,mBAAqBM,EAAc5B,EAAU,KAChDnd,EAAGwe,SAAWA,EACdxe,EAAGmf,OAASA,EACZnf,EAAG8d,SAAWM,EACdoB,EAASpB,GAAOpe,EAChBlB,EAAQuL,iBAAiB2U,EAAWhf,EAAI+e,EAC1C,CACA,SAASqB,GAActhB,EAASyf,EAAQS,EAAW7B,EAASsB,GAC1D,MAAMze,EAAKse,GAAYC,EAAOS,GAAY7B,EAASsB,GAC9Cze,IAGLlB,EAAQyL,oBAAoByU,EAAWhf,EAAIqgB,QAAQ5B,WAC5CF,EAAOS,GAAWhf,EAAG8d,UAC9B,CACA,SAASwC,GAAyBxhB,EAASyf,EAAQS,EAAWuB,GAC5D,MAAMC,EAAoBjC,EAAOS,IAAc,CAAC,EAChD,IAAK,MAAOyB,EAAY9B,KAAUpiB,OAAOmkB,QAAQF,GAC3CC,EAAWE,SAASJ,IACtBH,GAActhB,EAASyf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAGtE,CACA,SAASQ,GAAaN,GAGpB,OADAA,EAAQA,EAAMjU,QAAQiT,GAAgB,IAC/BI,GAAaY,IAAUA,CAChC,CACA,MAAMmB,GAAe,CACnB,EAAAc,CAAG9hB,EAAS6f,EAAOxB,EAAS2B,GAC1BI,GAAWpgB,EAAS6f,EAAOxB,EAAS2B,GAAoB,EAC1D,EACA,GAAA+B,CAAI/hB,EAAS6f,EAAOxB,EAAS2B,GAC3BI,GAAWpgB,EAAS6f,EAAOxB,EAAS2B,GAAoB,EAC1D,EACA,GAAAiB,CAAIjhB,EAAS+f,EAAmB1B,EAAS2B,GACvC,GAAiC,iBAAtBD,IAAmC/f,EAC5C,OAEF,MAAOigB,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GACrFgC,EAAc9B,IAAcH,EAC5BN,EAASF,GAAiBvf,GAC1B0hB,EAAoBjC,EAAOS,IAAc,CAAC,EAC1C+B,EAAclC,EAAkBmC,WAAW,KACjD,QAAwB,IAAbxC,EAAX,CAQA,GAAIuC,EACF,IAAK,MAAME,KAAgB1kB,OAAO4D,KAAKoe,GACrC+B,GAAyBxhB,EAASyf,EAAQ0C,EAAcpC,EAAkBlN,MAAM,IAGpF,IAAK,MAAOuP,EAAavC,KAAUpiB,OAAOmkB,QAAQF,GAAoB,CACpE,MAAMC,EAAaS,EAAYxW,QAAQkT,GAAe,IACjDkD,IAAejC,EAAkB8B,SAASF,IAC7CL,GAActhB,EAASyf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAEpE,CAXA,KAPA,CAEE,IAAKliB,OAAO4D,KAAKqgB,GAAmBvQ,OAClC,OAEFmQ,GAActhB,EAASyf,EAAQS,EAAWR,EAAUO,EAAc5B,EAAU,KAE9E,CAYF,EACA,OAAAgE,CAAQriB,EAAS6f,EAAOpI,GACtB,GAAqB,iBAAVoI,IAAuB7f,EAChC,OAAO,KAET,MAAM+c,EAAIR,KAGV,IAAI+F,EAAc,KACdC,GAAU,EACVC,GAAiB,EACjBC,GAAmB,EAJH5C,IADFM,GAAaN,IAMZ9C,IACjBuF,EAAcvF,EAAEhC,MAAM8E,EAAOpI,GAC7BsF,EAAE/c,GAASqiB,QAAQC,GACnBC,GAAWD,EAAYI,uBACvBF,GAAkBF,EAAYK,gCAC9BF,EAAmBH,EAAYM,sBAEjC,MAAMC,EAAM9B,GAAW,IAAIhG,MAAM8E,EAAO,CACtC0C,UACAO,YAAY,IACVrL,GAUJ,OATIgL,GACFI,EAAIE,iBAEFP,GACFxiB,EAAQ8a,cAAc+H,GAEpBA,EAAIJ,kBAAoBH,GAC1BA,EAAYS,iBAEPF,CACT,GAEF,SAAS9B,GAAWljB,EAAKmlB,EAAO,CAAC,GAC/B,IAAK,MAAOzlB,EAAKa,KAAUX,OAAOmkB,QAAQoB,GACxC,IACEnlB,EAAIN,GAAOa,CACb,CAAE,MAAO6kB,GACPxlB,OAAOC,eAAeG,EAAKN,EAAK,CAC9B2lB,cAAc,EACdtlB,IAAG,IACMQ,GAGb,CAEF,OAAOP,CACT,CASA,SAASslB,GAAc/kB,GACrB,GAAc,SAAVA,EACF,OAAO,EAET,GAAc,UAAVA,EACF,OAAO,EAET,GAAIA,IAAU4f,OAAO5f,GAAOkC,WAC1B,OAAO0d,OAAO5f,GAEhB,GAAc,KAAVA,GAA0B,SAAVA,EAClB,OAAO,KAET,GAAqB,iBAAVA,EACT,OAAOA,EAET,IACE,OAAOglB,KAAKC,MAAMC,mBAAmBllB,GACvC,CAAE,MAAO6kB,GACP,OAAO7kB,CACT,CACF,CACA,SAASmlB,GAAiBhmB,GACxB,OAAOA,EAAIqO,QAAQ,UAAU4X,GAAO,IAAIA,EAAItjB,iBAC9C,CACA,MAAMujB,GAAc,CAClB,gBAAAC,CAAiB1jB,EAASzC
,EAAKa,GAC7B4B,EAAQ6B,aAAa,WAAW0hB,GAAiBhmB,KAAQa,EAC3D,EACA,mBAAAulB,CAAoB3jB,EAASzC,GAC3ByC,EAAQ4B,gBAAgB,WAAW2hB,GAAiBhmB,KACtD,EACA,iBAAAqmB,CAAkB5jB,GAChB,IAAKA,EACH,MAAO,CAAC,EAEV,MAAM0B,EAAa,CAAC,EACdmiB,EAASpmB,OAAO4D,KAAKrB,EAAQ8jB,SAASld,QAAOrJ,GAAOA,EAAI2kB,WAAW,QAAU3kB,EAAI2kB,WAAW,cAClG,IAAK,MAAM3kB,KAAOsmB,EAAQ,CACxB,IAAIE,EAAUxmB,EAAIqO,QAAQ,MAAO,IACjCmY,EAAUA,EAAQC,OAAO,GAAG9jB,cAAgB6jB,EAAQlR,MAAM,EAAGkR,EAAQ5S,QACrEzP,EAAWqiB,GAAWZ,GAAcnjB,EAAQ8jB,QAAQvmB,GACtD,CACA,OAAOmE,CACT,EACAuiB,iBAAgB,CAACjkB,EAASzC,IACjB4lB,GAAcnjB,EAAQic,aAAa,WAAWsH,GAAiBhmB,QAgB1E,MAAM2mB,GAEJ,kBAAWC,GACT,MAAO,CAAC,CACV,CACA,sBAAWC,GACT,MAAO,CAAC,CACV,CACA,eAAWpH,GACT,MAAM,IAAIqH,MAAM,sEAClB,CACA,UAAAC,CAAWC,GAIT,OAHAA,EAAS9D,KAAK+D,gBAAgBD,GAC9BA,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CACA,iBAAAE,CAAkBF,GAChB,OAAOA,CACT,CACA,eAAAC,CAAgBD,EAAQvkB,GACtB,MAAM2kB,EAAa,GAAU3kB,GAAWyjB,GAAYQ,iBAAiBjkB,EAAS,UAAY,CAAC,EAE3F,MAAO,IACFygB,KAAKmE,YAAYT,WACM,iBAAfQ,EAA0BA,EAAa,CAAC,KAC/C,GAAU3kB,GAAWyjB,GAAYG,kBAAkB5jB,GAAW,CAAC,KAC7C,iBAAXukB,EAAsBA,EAAS,CAAC,EAE/C,CACA,gBAAAG,CAAiBH,EAAQM,EAAcpE,KAAKmE,YAAYR,aACtD,IAAK,MAAO7hB,EAAUuiB,KAAkBrnB,OAAOmkB,QAAQiD,GAAc,CACnE,MAAMzmB,EAAQmmB,EAAOhiB,GACfwiB,EAAY,GAAU3mB,GAAS,UAhiBrC4c,OADSA,EAiiB+C5c,GA/hBnD,GAAG4c,IAELvd,OAAOM,UAAUuC,SAASrC,KAAK+c,GAAQL,MAAM,eAAe,GAAGza,cA8hBlE,IAAK,IAAI8kB,OAAOF,GAAehhB,KAAKihB,GAClC,MAAM,IAAIE,UAAU,GAAGxE,KAAKmE,YAAY5H,KAAKkI,0BAA0B3iB,qBAA4BwiB,yBAAiCD,MAExI,CAriBW9J,KAsiBb,EAqBF,MAAMmK,WAAsBjB,GAC1B,WAAAU,CAAY5kB,EAASukB,GACnBa,SACAplB,EAAUmb,GAAWnb,MAIrBygB,KAAK4E,SAAWrlB,EAChBygB,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/BzK,GAAKtH,IAAIiO,KAAK4E,SAAU5E,KAAKmE,YAAYW,SAAU9E,MACrD,CAGA,OAAA+E,GACE1L,GAAKM,OAAOqG,KAAK4E,SAAU5E,KAAKmE,YAAYW,UAC5CvE,GAAaC,IAAIR,KAAK4E,SAAU5E,KAAKmE,YAAYa,WACjD,IAAK,MAAMC,KAAgBjoB,OAAOkoB,oBAAoBlF,MACpDA,KAAKiF,GAAgB,IAEzB,CACA,cAAAE,CAAe9I,EAAU9c,EAAS6lB,GAAa,GAC7CpI,GAAuBX,EAAU9c,EAAS6lB,EAC5C,CACA,UAAAvB,CAAWC,GAIT,OAHAA,EAAS9D,KAAK+D,gBAAgBD,EAAQ9D,KAAK4E,UAC3Cd,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CAGA,kBAAOuB,CAAY9lB,GACjB,OAAO8Z,GAAKlc,IAAIud,GAAWnb,GAAUygB,KAAK8E,SAC5C,CACA,0BAAOQ,CAAoB/lB,EAASukB,EAAS,CAAC,GAC5C,OAAO9D,KAAKqF,YAAY9lB,IAAY,IAAIygB,KAAKzgB,EAA2B,iBAAXukB,EAAsBA,EAAS,KAC9F,CACA,kBAAWyB,GACT,MA5CY,OA6Cd,CACA,mBAAWT,GACT,MAAO,MAAM9E,KAAKzD,MACpB,CACA,oBAAWyI,GACT,MAAO,IAAIhF,KAAK8E,UAClB,CACA,gBAAOU,CAAUllB,GACf,MAAO,GAAGA,IAAO0f,KAAKgF,WACxB,EAUF,MAAMS,GAAclmB,IAClB,IAAIwa,EAAWxa,EAAQic,aAAa,kBACpC,IAAKzB,GAAyB,MAAbA,EAAkB,CACjC,IAAI2L,EAAgBnmB,EAAQic,aAAa,QAMzC,IAAKkK,IAAkBA,EAActE,SAAS,OAASsE,EAAcjE,WAAW,KAC9E,OAAO,KAILiE,EAActE,SAAS,OAASsE,EAAcjE,WAAW,OAC3DiE,EAAgB,IAAIA,EAAcxjB,MAAM,KAAK,MAE/C6X,EAAW2L,GAAmC,MAAlBA,EAAwBA,EAAcC,OAAS,IAC7E,CACA,OAAO5L,EAAWA,EAAS7X,MAAM,KAAKY,KAAI8iB,GAAO9L,GAAc8L,KAAM1iB,KAAK,KAAO,IAAI,EAEjF2iB,GAAiB,CACrB1T,KAAI,CAAC4H,EAAUxa,EAAU8F,SAASC,kBACzB,GAAG3G,UAAUsB,QAAQ3C,UAAU8iB,iBAAiB5iB,KAAK+B,EAASwa,IAEvE+L,QAAO,CAAC/L,EAAUxa,EAAU8F,SAASC,kBAC5BrF,QAAQ3C,UAAU8K,cAAc5K,KAAK+B,EAASwa,GAEvDgM,SAAQ,CAACxmB,EAASwa,IACT,GAAGpb,UAAUY,EAAQwmB,UAAU5f,QAAOzB,GAASA,EAAMshB,QAAQjM,KAEtE,OAAAkM,CAAQ1mB,EAASwa,GACf,MAAMkM,EAAU,GAChB,IAAIC,EAAW3mB,EAAQwF,WAAWiW,QAAQjB,GAC1C,KAAOmM,GACLD,EAAQrU,KAAKsU,GACbA,EAAWA,EAASnhB,WAAWiW,QAAQjB,GAEzC,OAAOkM,CACT,EACA,IAAAE,CAAK5mB,EAASwa,GACZ,IAAIqM,EAAW7mB,EAAQ8mB,uBACvB,KAAOD,GAAU,CACf,GAAIA,EAASJ,QAAQjM,GACnB,MAAO,CAACqM,GAEVA,EAAWA,EAASC,sBACtB,CACA,MAAO,EACT,EAEA,IAAAxhB,CAAKtF,EAASwa,GACZ,IAAIlV,EAAOtF,EAAQ+mB,mBACnB,KAAOzhB,GAAM,CACX,GAAIA,EAAKmhB,QAAQjM,GACf,MAAO,CAAClV,GAEVA,EAAO
A,EAAKyhB,kBACd,CACA,MAAO,EACT,EACA,iBAAAC,CAAkBhnB,GAChB,MAAMinB,EAAa,CAAC,IAAK,SAAU,QAAS,WAAY,SAAU,UAAW,aAAc,4BAA4B1jB,KAAIiX,GAAY,GAAGA,2BAAiC7W,KAAK,KAChL,OAAO8c,KAAK7N,KAAKqU,EAAYjnB,GAAS4G,QAAOsgB,IAAOvL,GAAWuL,IAAO9L,GAAU8L,IAClF,EACA,sBAAAC,CAAuBnnB,GACrB,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAIwa,GACK8L,GAAeC,QAAQ/L,GAAYA,EAErC,IACT,EACA,sBAAA4M,CAAuBpnB,GACrB,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAOwa,EAAW8L,GAAeC,QAAQ/L,GAAY,IACvD,EACA,+BAAA6M,CAAgCrnB,GAC9B,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAOwa,EAAW8L,GAAe1T,KAAK4H,GAAY,EACpD,GAUI8M,GAAuB,CAACC,EAAWC,EAAS,UAChD,MAAMC,EAAa,gBAAgBF,EAAU9B,YACvC1kB,EAAOwmB,EAAUvK,KACvBgE,GAAac,GAAGhc,SAAU2hB,EAAY,qBAAqB1mB,OAAU,SAAU8e,GAI7E,GAHI,CAAC,IAAK,QAAQgC,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAEJpH,GAAW8E,MACb,OAEF,MAAMzT,EAASsZ,GAAec,uBAAuB3G,OAASA,KAAKhF,QAAQ,IAAI1a,KAC9DwmB,EAAUxB,oBAAoB/Y,GAGtCwa,IACX,GAAE,EAiBEG,GAAc,YACdC,GAAc,QAAQD,KACtBE,GAAe,SAASF,KAQ9B,MAAMG,WAAc3C,GAElB,eAAWnI,GACT,MAfW,OAgBb,CAGA,KAAA+K,GAEE,GADmB/G,GAAaqB,QAAQ5B,KAAK4E,SAAUuC,IACxCnF,iBACb,OAEFhC,KAAK4E,SAASvJ,UAAU1B,OAlBF,QAmBtB,MAAMyL,EAAapF,KAAK4E,SAASvJ,UAAU7W,SApBrB,QAqBtBwb,KAAKmF,gBAAe,IAAMnF,KAAKuH,mBAAmBvH,KAAK4E,SAAUQ,EACnE,CAGA,eAAAmC,GACEvH,KAAK4E,SAASjL,SACd4G,GAAaqB,QAAQ5B,KAAK4E,SAAUwC,IACpCpH,KAAK+E,SACP,CAGA,sBAAOtI,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOgd,GAAM/B,oBAAoBtF,MACvC,GAAsB,iBAAX8D,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KAJb,CAKF,GACF,EAOF6G,GAAqBQ,GAAO,SAM5BlL,GAAmBkL,IAcnB,MAKMI,GAAyB,4BAO/B,MAAMC,WAAehD,GAEnB,eAAWnI,GACT,MAfW,QAgBb,CAGA,MAAAoL,GAEE3H,KAAK4E,SAASxjB,aAAa,eAAgB4e,KAAK4E,SAASvJ,UAAUsM,OAjB3C,UAkB1B,CAGA,sBAAOlL,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOqd,GAAOpC,oBAAoBtF,MACzB,WAAX8D,GACFzZ,EAAKyZ,IAET,GACF,EAOFvD,GAAac,GAAGhc,SAjCe,2BAiCmBoiB,IAAwBrI,IACxEA,EAAMkD,iBACN,MAAMsF,EAASxI,EAAM7S,OAAOyO,QAAQyM,IACvBC,GAAOpC,oBAAoBsC,GACnCD,QAAQ,IAOfxL,GAAmBuL,IAcnB,MACMG,GAAc,YACdC,GAAmB,aAAaD,KAChCE,GAAkB,YAAYF,KAC9BG,GAAiB,WAAWH,KAC5BI,GAAoB,cAAcJ,KAClCK,GAAkB,YAAYL,KAK9BM,GAAY,CAChBC,YAAa,KACbC,aAAc,KACdC,cAAe,MAEXC,GAAgB,CACpBH,YAAa,kBACbC,aAAc,kBACdC,cAAe,mBAOjB,MAAME,WAAc/E,GAClB,WAAAU,CAAY5kB,EAASukB,GACnBa,QACA3E,KAAK4E,SAAWrlB,EACXA,GAAYipB,GAAMC,gBAGvBzI,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAK0I,QAAU,EACf1I,KAAK2I,sBAAwB7H,QAAQlhB,OAAOgpB,cAC5C5I,KAAK6I,cACP,CAGA,kBAAWnF,GACT,OAAOyE,EACT,CACA,sBAAWxE,GACT,OAAO4E,EACT,CACA,eAAWhM,GACT,MA/CW,OAgDb,CAGA,OAAAwI,GACExE,GAAaC,IAAIR,KAAK4E,SAAUiD,GAClC,CAGA,MAAAiB,CAAO1J,GACAY,KAAK2I,sBAIN3I,KAAK+I,wBAAwB3J,KAC/BY,KAAK0I,QAAUtJ,EAAM4J,SAJrBhJ,KAAK0I,QAAUtJ,EAAM6J,QAAQ,GAAGD,OAMpC,CACA,IAAAE,CAAK9J,GACCY,KAAK+I,wBAAwB3J,KAC/BY,KAAK0I,QAAUtJ,EAAM4J,QAAUhJ,KAAK0I,SAEtC1I,KAAKmJ,eACLtM,GAAQmD,KAAK6E,QAAQuD,YACvB,CACA,KAAAgB,CAAMhK,GACJY,KAAK0I,QAAUtJ,EAAM6J,SAAW7J,EAAM6J,QAAQvY,OAAS,EAAI,EAAI0O,EAAM6J,QAAQ,GAAGD,QAAUhJ,KAAK0I,OACjG,CACA,YAAAS,GACE,MAAME,EAAYlnB,KAAKoC,IAAIyb,KAAK0I,SAChC,GAAIW,GAnEgB,GAoElB,OAEF,MAAM/b,EAAY+b,EAAYrJ,KAAK0I,QACnC1I,KAAK0I,QAAU,EACVpb,GAGLuP,GAAQvP,EAAY,EAAI0S,KAAK6E,QAAQyD,cAAgBtI,KAAK6E,QAAQwD,aACpE,CACA,WAAAQ,GACM7I,KAAK2I,uBACPpI,GAAac,GAAGrB,KAAK4E,SAAUqD,IAAmB7I,GAASY,KAAK8I,OAAO1J,KACvEmB,GAAac,GAAGrB,KAAK4E,SAAUsD,IAAiB9I,GAASY,KAAKkJ,KAAK9J,KACnEY,KAAK4E,SAASvJ,UAAU5E,IAlFG,mBAoF3B8J,GAAac,GAAGrB,KAAK4E,SAAUkD,IAAkB1I,GAASY,KAAK8I,OAAO1J,KACtEmB,GAAac,GAAGrB,KAAK4E,SAAUmD,IAAiB3I,GAASY,KAAKoJ,MAAMhK,KACpEmB,GAAac,GAAGrB,KAAK4E,SAAUoD,IAAgB5I,GAASY,KAAKkJ,KAAK9J,KAEtE,CACA,uBAAA2J,CAAwB3J,GACtB,OAAOY,KAAK2I,wBA3FS,QA2FiBvJ,EAAMkK,aA5FrB,UA4FyDlK,EAAMkK,YACxF,CAGA,kB
AAOb,GACL,MAAO,iBAAkBpjB,SAASC,iBAAmB7C,UAAU8mB,eAAiB,CAClF,EAeF,MAEMC,GAAc,eACdC,GAAiB,YACjBC,GAAmB,YACnBC,GAAoB,aAGpBC,GAAa,OACbC,GAAa,OACbC,GAAiB,OACjBC,GAAkB,QAClBC,GAAc,QAAQR,KACtBS,GAAa,OAAOT,KACpBU,GAAkB,UAAUV,KAC5BW,GAAqB,aAAaX,KAClCY,GAAqB,aAAaZ,KAClCa,GAAmB,YAAYb,KAC/Bc,GAAwB,OAAOd,KAAcC,KAC7Cc,GAAyB,QAAQf,KAAcC,KAC/Ce,GAAsB,WACtBC,GAAsB,SAMtBC,GAAkB,UAClBC,GAAgB,iBAChBC,GAAuBF,GAAkBC,GAKzCE,GAAmB,CACvB,CAACnB,IAAmBK,GACpB,CAACJ,IAAoBG,IAEjBgB,GAAY,CAChBC,SAAU,IACVC,UAAU,EACVC,MAAO,QACPC,MAAM,EACNC,OAAO,EACPC,MAAM,GAEFC,GAAgB,CACpBN,SAAU,mBAEVC,SAAU,UACVC,MAAO,mBACPC,KAAM,mBACNC,MAAO,UACPC,KAAM,WAOR,MAAME,WAAiB5G,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKuL,UAAY,KACjBvL,KAAKwL,eAAiB,KACtBxL,KAAKyL,YAAa,EAClBzL,KAAK0L,aAAe,KACpB1L,KAAK2L,aAAe,KACpB3L,KAAK4L,mBAAqB/F,GAAeC,QArCjB,uBAqC8C9F,KAAK4E,UAC3E5E,KAAK6L,qBACD7L,KAAK6E,QAAQqG,OAASV,IACxBxK,KAAK8L,OAET,CAGA,kBAAWpI,GACT,OAAOoH,EACT,CACA,sBAAWnH,GACT,OAAO0H,EACT,CACA,eAAW9O,GACT,MAnFW,UAoFb,CAGA,IAAA1X,GACEmb,KAAK+L,OAAOnC,GACd,CACA,eAAAoC,IAIO3mB,SAAS4mB,QAAUtR,GAAUqF,KAAK4E,WACrC5E,KAAKnb,MAET,CACA,IAAAshB,GACEnG,KAAK+L,OAAOlC,GACd,CACA,KAAAoB,GACMjL,KAAKyL,YACPrR,GAAqB4F,KAAK4E,UAE5B5E,KAAKkM,gBACP,CACA,KAAAJ,GACE9L,KAAKkM,iBACLlM,KAAKmM,kBACLnM,KAAKuL,UAAYa,aAAY,IAAMpM,KAAKgM,mBAAmBhM,KAAK6E,QAAQkG,SAC1E,CACA,iBAAAsB,GACOrM,KAAK6E,QAAQqG,OAGdlL,KAAKyL,WACPlL,GAAae,IAAItB,KAAK4E,SAAUqF,IAAY,IAAMjK,KAAK8L,UAGzD9L,KAAK8L,QACP,CACA,EAAAQ,CAAG7T,GACD,MAAM8T,EAAQvM,KAAKwM,YACnB,GAAI/T,EAAQ8T,EAAM7b,OAAS,GAAK+H,EAAQ,EACtC,OAEF,GAAIuH,KAAKyL,WAEP,YADAlL,GAAae,IAAItB,KAAK4E,SAAUqF,IAAY,IAAMjK,KAAKsM,GAAG7T,KAG5D,MAAMgU,EAAczM,KAAK0M,cAAc1M,KAAK2M,cAC5C,GAAIF,IAAgBhU,EAClB,OAEF,MAAMtC,EAAQsC,EAAQgU,EAAc7C,GAAaC,GACjD7J,KAAK+L,OAAO5V,EAAOoW,EAAM9T,GAC3B,CACA,OAAAsM,GACM/E,KAAK2L,cACP3L,KAAK2L,aAAa5G,UAEpBJ,MAAMI,SACR,CAGA,iBAAAf,CAAkBF,GAEhB,OADAA,EAAO8I,gBAAkB9I,EAAOiH,SACzBjH,CACT,CACA,kBAAA+H,GACM7L,KAAK6E,QAAQmG,UACfzK,GAAac,GAAGrB,KAAK4E,SAAUsF,IAAiB9K,GAASY,KAAK6M,SAASzN,KAE9C,UAAvBY,KAAK6E,QAAQoG,QACf1K,GAAac,GAAGrB,KAAK4E,SAAUuF,IAAoB,IAAMnK,KAAKiL,UAC9D1K,GAAac,GAAGrB,KAAK4E,SAAUwF,IAAoB,IAAMpK,KAAKqM,uBAE5DrM,KAAK6E,QAAQsG,OAAS3C,GAAMC,eAC9BzI,KAAK8M,yBAET,CACA,uBAAAA,GACE,IAAK,MAAMC,KAAOlH,GAAe1T,KArIX,qBAqImC6N,KAAK4E,UAC5DrE,GAAac,GAAG0L,EAAK1C,IAAkBjL,GAASA,EAAMkD,mBAExD,MAmBM0K,EAAc,CAClB3E,aAAc,IAAMrI,KAAK+L,OAAO/L,KAAKiN,kBAAkBnD,KACvDxB,cAAe,IAAMtI,KAAK+L,OAAO/L,KAAKiN,kBAAkBlD,KACxD3B,YAtBkB,KACS,UAAvBpI,KAAK6E,QAAQoG,QAYjBjL,KAAKiL,QACDjL,KAAK0L,cACPwB,aAAalN,KAAK0L,cAEpB1L,KAAK0L,aAAe7N,YAAW,IAAMmC,KAAKqM,qBAjLjB,IAiL+DrM,KAAK6E,QAAQkG,UAAS,GAOhH/K,KAAK2L,aAAe,IAAInD,GAAMxI,KAAK4E,SAAUoI,EAC/C,CACA,QAAAH,CAASzN,GACP,GAAI,kBAAkB/b,KAAK+b,EAAM7S,OAAO0a,SACtC,OAEF,MAAM3Z,EAAYud,GAAiBzL,EAAMtiB,KACrCwQ,IACF8R,EAAMkD,iBACNtC,KAAK+L,OAAO/L,KAAKiN,kBAAkB3f,IAEvC,CACA,aAAAof,CAAcntB,GACZ,OAAOygB,KAAKwM,YAAYrnB,QAAQ5F,EAClC,CACA,0BAAA4tB,CAA2B1U,GACzB,IAAKuH,KAAK4L,mBACR,OAEF,MAAMwB,EAAkBvH,GAAeC,QAAQ4E,GAAiB1K,KAAK4L,oBACrEwB,EAAgB/R,UAAU1B,OAAO8Q,IACjC2C,EAAgBjsB,gBAAgB,gBAChC,MAAMksB,EAAqBxH,GAAeC,QAAQ,sBAAsBrN,MAAWuH,KAAK4L,oBACpFyB,IACFA,EAAmBhS,UAAU5E,IAAIgU,IACjC4C,EAAmBjsB,aAAa,eAAgB,QAEpD,CACA,eAAA+qB,GACE,MAAM5sB,EAAUygB,KAAKwL,gBAAkBxL,KAAK2M,aAC5C,IAAKptB,EACH,OAEF,MAAM+tB,EAAkB/P,OAAOgQ,SAAShuB,EAAQic,aAAa,oBAAqB,IAClFwE,KAAK6E,QAAQkG,SAAWuC,GAAmBtN,KAAK6E,QAAQ+H,eAC1D,CACA,MAAAb,CAAO5V,EAAO5W,EAAU,MACtB,GAAIygB,KAAKyL,WACP,OAEF,MAAM1N,EAAgBiC,KAAK2M,aACrBa,EAASrX,IAAUyT,GACnB6D,EAAcluB,GAAWue,GAAqBkC,KAAKwM,YAAazO,EAAeyP,EAAQxN,KAAK6E,QAAQuG,MAC1G,GAAIqC,IAAgB1P,EAClB,OAEF,MAAM2P,EAAmB1N,K
AAK0M,cAAce,GACtCE,EAAenI,GACZjF,GAAaqB,QAAQ5B,KAAK4E,SAAUY,EAAW,CACpD1F,cAAe2N,EACfngB,UAAW0S,KAAK4N,kBAAkBzX,GAClCuD,KAAMsG,KAAK0M,cAAc3O,GACzBuO,GAAIoB,IAIR,GADmBC,EAAa3D,IACjBhI,iBACb,OAEF,IAAKjE,IAAkB0P,EAGrB,OAEF,MAAMI,EAAY/M,QAAQd,KAAKuL,WAC/BvL,KAAKiL,QACLjL,KAAKyL,YAAa,EAClBzL,KAAKmN,2BAA2BO,GAChC1N,KAAKwL,eAAiBiC,EACtB,MAAMK,EAAuBN,EA3OR,sBADF,oBA6ObO,EAAiBP,EA3OH,qBACA,qBA2OpBC,EAAYpS,UAAU5E,IAAIsX,GAC1BlS,GAAO4R,GACP1P,EAAc1C,UAAU5E,IAAIqX,GAC5BL,EAAYpS,UAAU5E,IAAIqX,GAQ1B9N,KAAKmF,gBAPoB,KACvBsI,EAAYpS,UAAU1B,OAAOmU,EAAsBC,GACnDN,EAAYpS,UAAU5E,IAAIgU,IAC1B1M,EAAc1C,UAAU1B,OAAO8Q,GAAqBsD,EAAgBD,GACpE9N,KAAKyL,YAAa,EAClBkC,EAAa1D,GAAW,GAEYlM,EAAeiC,KAAKgO,eACtDH,GACF7N,KAAK8L,OAET,CACA,WAAAkC,GACE,OAAOhO,KAAK4E,SAASvJ,UAAU7W,SAhQV,QAiQvB,CACA,UAAAmoB,GACE,OAAO9G,GAAeC,QAAQ8E,GAAsB5K,KAAK4E,SAC3D,CACA,SAAA4H,GACE,OAAO3G,GAAe1T,KAAKwY,GAAe3K,KAAK4E,SACjD,CACA,cAAAsH,GACMlM,KAAKuL,YACP0C,cAAcjO,KAAKuL,WACnBvL,KAAKuL,UAAY,KAErB,CACA,iBAAA0B,CAAkB3f,GAChB,OAAI2O,KACK3O,IAAcwc,GAAiBD,GAAaD,GAE9Ctc,IAAcwc,GAAiBF,GAAaC,EACrD,CACA,iBAAA+D,CAAkBzX,GAChB,OAAI8F,KACK9F,IAAU0T,GAAaC,GAAiBC,GAE1C5T,IAAU0T,GAAaE,GAAkBD,EAClD,CAGA,sBAAOrN,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOihB,GAAShG,oBAAoBtF,KAAM8D,GAChD,GAAsB,iBAAXA,GAIX,GAAsB,iBAAXA,EAAqB,CAC9B,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IACP,OAREzZ,EAAKiiB,GAAGxI,EASZ,GACF,EAOFvD,GAAac,GAAGhc,SAAUklB,GAvSE,uCAuS2C,SAAUnL,GAC/E,MAAM7S,EAASsZ,GAAec,uBAAuB3G,MACrD,IAAKzT,IAAWA,EAAO8O,UAAU7W,SAASgmB,IACxC,OAEFpL,EAAMkD,iBACN,MAAM4L,EAAW5C,GAAShG,oBAAoB/Y,GACxC4hB,EAAanO,KAAKxE,aAAa,oBACrC,OAAI2S,GACFD,EAAS5B,GAAG6B,QACZD,EAAS7B,qBAGyC,SAAhDrJ,GAAYQ,iBAAiBxD,KAAM,UACrCkO,EAASrpB,YACTqpB,EAAS7B,sBAGX6B,EAAS/H,YACT+H,EAAS7B,oBACX,IACA9L,GAAac,GAAGzhB,OAAQ0qB,IAAuB,KAC7C,MAAM8D,EAAYvI,GAAe1T,KA5TR,6BA6TzB,IAAK,MAAM+b,KAAYE,EACrB9C,GAAShG,oBAAoB4I,EAC/B,IAOF/R,GAAmBmP,IAcnB,MAEM+C,GAAc,eAEdC,GAAe,OAAOD,KACtBE,GAAgB,QAAQF,KACxBG,GAAe,OAAOH,KACtBI,GAAiB,SAASJ,KAC1BK,GAAyB,QAAQL,cACjCM,GAAoB,OACpBC,GAAsB,WACtBC,GAAwB,aAExBC,GAA6B,WAAWF,OAAwBA,KAKhEG,GAAyB,8BACzBC,GAAY,CAChBvqB,OAAQ,KACRkjB,QAAQ,GAEJsH,GAAgB,CACpBxqB,OAAQ,iBACRkjB,OAAQ,WAOV,MAAMuH,WAAiBxK,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKmP,kBAAmB,EACxBnP,KAAKoP,cAAgB,GACrB,MAAMC,EAAaxJ,GAAe1T,KAAK4c,IACvC,IAAK,MAAMO,KAAQD,EAAY,CAC7B,MAAMtV,EAAW8L,GAAea,uBAAuB4I,GACjDC,EAAgB1J,GAAe1T,KAAK4H,GAAU5T,QAAOqpB,GAAgBA,IAAiBxP,KAAK4E,WAChF,OAAb7K,GAAqBwV,EAAc7e,QACrCsP,KAAKoP,cAAcxd,KAAK0d,EAE5B,CACAtP,KAAKyP,sBACAzP,KAAK6E,QAAQpgB,QAChBub,KAAK0P,0BAA0B1P,KAAKoP,cAAepP,KAAK2P,YAEtD3P,KAAK6E,QAAQ8C,QACf3H,KAAK2H,QAET,CAGA,kBAAWjE,GACT,OAAOsL,EACT,CACA,sBAAWrL,GACT,OAAOsL,EACT,CACA,eAAW1S,GACT,MA9DW,UA+Db,CAGA,MAAAoL,GACM3H,KAAK2P,WACP3P,KAAK4P,OAEL5P,KAAK6P,MAET,CACA,IAAAA,GACE,GAAI7P,KAAKmP,kBAAoBnP,KAAK2P,WAChC,OAEF,IAAIG,EAAiB,GAQrB,GALI9P,KAAK6E,QAAQpgB,SACfqrB,EAAiB9P,KAAK+P,uBAhEH,wCAgE4C5pB,QAAO5G,GAAWA,IAAYygB,KAAK4E,WAAU9hB,KAAIvD,GAAW2vB,GAAS5J,oBAAoB/lB,EAAS,CAC/JooB,QAAQ,OAGRmI,EAAepf,QAAUof,EAAe,GAAGX,iBAC7C,OAGF,GADmB5O,GAAaqB,QAAQ5B,KAAK4E,SAAU0J,IACxCtM,iBACb,OAEF,IAAK,MAAMgO,KAAkBF,EAC3BE,EAAeJ,OAEjB,MAAMK,EAAYjQ,KAAKkQ,gBACvBlQ,KAAK4E,SAASvJ,UAAU1B,OAAOiV,IAC/B5O,KAAK4E,SAASvJ,UAAU5E,IAAIoY,IAC5B7O,KAAK4E,SAAS7jB,MAAMkvB,GAAa,EACjCjQ,KAAK0P,0BAA0B1P,KAAKoP,eAAe,GACnDpP,KAAKmP,kBAAmB,EACxB,MAQMgB,EAAa,SADUF,EAAU,GAAGxL,cAAgBwL,EAAU7d,MAAM,KAE1E4N,KAAKmF,gBATY,KACfnF,KAAKmP,kBAAmB,EACxBnP,KAAK4E,SAASvJ,UAAU1B,OAAOkV,IAC/B7O,KAAK4E,SAASvJ,UAAU5E,IAAImY,GAAqBD,IACjD3O,KAAK4E,SAAS7jB,MAAMkvB,GAAa,GACj
C1P,GAAaqB,QAAQ5B,KAAK4E,SAAU2J,GAAc,GAItBvO,KAAK4E,UAAU,GAC7C5E,KAAK4E,SAAS7jB,MAAMkvB,GAAa,GAAGjQ,KAAK4E,SAASuL,MACpD,CACA,IAAAP,GACE,GAAI5P,KAAKmP,mBAAqBnP,KAAK2P,WACjC,OAGF,GADmBpP,GAAaqB,QAAQ5B,KAAK4E,SAAU4J,IACxCxM,iBACb,OAEF,MAAMiO,EAAYjQ,KAAKkQ,gBACvBlQ,KAAK4E,SAAS7jB,MAAMkvB,GAAa,GAAGjQ,KAAK4E,SAASthB,wBAAwB2sB,OAC1EpU,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIoY,IAC5B7O,KAAK4E,SAASvJ,UAAU1B,OAAOiV,GAAqBD,IACpD,IAAK,MAAM/M,KAAW5B,KAAKoP,cAAe,CACxC,MAAM7vB,EAAUsmB,GAAec,uBAAuB/E,GAClDriB,IAAYygB,KAAK2P,SAASpwB,IAC5BygB,KAAK0P,0BAA0B,CAAC9N,IAAU,EAE9C,CACA5B,KAAKmP,kBAAmB,EAOxBnP,KAAK4E,SAAS7jB,MAAMkvB,GAAa,GACjCjQ,KAAKmF,gBAPY,KACfnF,KAAKmP,kBAAmB,EACxBnP,KAAK4E,SAASvJ,UAAU1B,OAAOkV,IAC/B7O,KAAK4E,SAASvJ,UAAU5E,IAAImY,IAC5BrO,GAAaqB,QAAQ5B,KAAK4E,SAAU6J,GAAe,GAGvBzO,KAAK4E,UAAU,EAC/C,CACA,QAAA+K,CAASpwB,EAAUygB,KAAK4E,UACtB,OAAOrlB,EAAQ8b,UAAU7W,SAASmqB,GACpC,CAGA,iBAAA3K,CAAkBF,GAGhB,OAFAA,EAAO6D,OAAS7G,QAAQgD,EAAO6D,QAC/B7D,EAAOrf,OAASiW,GAAWoJ,EAAOrf,QAC3Bqf,CACT,CACA,aAAAoM,GACE,OAAOlQ,KAAK4E,SAASvJ,UAAU7W,SA3IL,uBAChB,QACC,QA0Ib,CACA,mBAAAirB,GACE,IAAKzP,KAAK6E,QAAQpgB,OAChB,OAEF,MAAMshB,EAAW/F,KAAK+P,uBAAuBhB,IAC7C,IAAK,MAAMxvB,KAAWwmB,EAAU,CAC9B,MAAMqK,EAAWvK,GAAec,uBAAuBpnB,GACnD6wB,GACFpQ,KAAK0P,0BAA0B,CAACnwB,GAAUygB,KAAK2P,SAASS,GAE5D,CACF,CACA,sBAAAL,CAAuBhW,GACrB,MAAMgM,EAAWF,GAAe1T,KAAK2c,GAA4B9O,KAAK6E,QAAQpgB,QAE9E,OAAOohB,GAAe1T,KAAK4H,EAAUiG,KAAK6E,QAAQpgB,QAAQ0B,QAAO5G,IAAYwmB,EAAS3E,SAAS7hB,IACjG,CACA,yBAAAmwB,CAA0BW,EAAcC,GACtC,GAAKD,EAAa3f,OAGlB,IAAK,MAAMnR,KAAW8wB,EACpB9wB,EAAQ8b,UAAUsM,OArKK,aAqKyB2I,GAChD/wB,EAAQ6B,aAAa,gBAAiBkvB,EAE1C,CAGA,sBAAO7T,CAAgBqH,GACrB,MAAMe,EAAU,CAAC,EAIjB,MAHsB,iBAAXf,GAAuB,YAAYzgB,KAAKygB,KACjDe,EAAQ8C,QAAS,GAEZ3H,KAAKwH,MAAK,WACf,MAAMnd,EAAO6kB,GAAS5J,oBAAoBtF,KAAM6E,GAChD,GAAsB,iBAAXf,EAAqB,CAC9B,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IACP,CACF,GACF,EAOFvD,GAAac,GAAGhc,SAAUqpB,GAAwBK,IAAwB,SAAU3P,IAErD,MAAzBA,EAAM7S,OAAO0a,SAAmB7H,EAAMW,gBAAmD,MAAjCX,EAAMW,eAAekH,UAC/E7H,EAAMkD,iBAER,IAAK,MAAM/iB,KAAWsmB,GAAee,gCAAgC5G,MACnEkP,GAAS5J,oBAAoB/lB,EAAS,CACpCooB,QAAQ,IACPA,QAEP,IAMAxL,GAAmB+S,IAcnB,MAAMqB,GAAS,WAETC,GAAc,eACdC,GAAiB,YAGjBC,GAAiB,UACjBC,GAAmB,YAGnBC,GAAe,OAAOJ,KACtBK,GAAiB,SAASL,KAC1BM,GAAe,OAAON,KACtBO,GAAgB,QAAQP,KACxBQ,GAAyB,QAAQR,KAAcC,KAC/CQ,GAAyB,UAAUT,KAAcC,KACjDS,GAAuB,QAAQV,KAAcC,KAC7CU,GAAoB,OAMpBC,GAAyB,4DACzBC,GAA6B,GAAGD,MAA0BD,KAC1DG,GAAgB,iBAIhBC,GAAgBtV,KAAU,UAAY,YACtCuV,GAAmBvV,KAAU,YAAc,UAC3CwV,GAAmBxV,KAAU,aAAe,eAC5CyV,GAAsBzV,KAAU,eAAiB,aACjD0V,GAAkB1V,KAAU,aAAe,cAC3C2V,GAAiB3V,KAAU,cAAgB,aAG3C4V,GAAY,CAChBC,WAAW,EACX7jB,SAAU,kBACV8jB,QAAS,UACT/pB,OAAQ,CAAC,EAAG,GACZgqB,aAAc,KACd1zB,UAAW,UAEP2zB,GAAgB,CACpBH,UAAW,mBACX7jB,SAAU,mBACV8jB,QAAS,SACT/pB,OAAQ,0BACRgqB,aAAc,yBACd1zB,UAAW,2BAOb,MAAM4zB,WAAiBxN,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKmS,QAAU,KACfnS,KAAKoS,QAAUpS,KAAK4E,SAAS7f,WAE7Bib,KAAKqS,MAAQxM,GAAehhB,KAAKmb,KAAK4E,SAAU0M,IAAe,IAAMzL,GAAeM,KAAKnG,KAAK4E,SAAU0M,IAAe,IAAMzL,GAAeC,QAAQwL,GAAetR,KAAKoS,SACxKpS,KAAKsS,UAAYtS,KAAKuS,eACxB,CAGA,kBAAW7O,GACT,OAAOmO,EACT,CACA,sBAAWlO,GACT,OAAOsO,EACT,CACA,eAAW1V,GACT,OAAOgU,EACT,CAGA,MAAA5I,GACE,OAAO3H,KAAK2P,WAAa3P,KAAK4P,OAAS5P,KAAK6P,MAC9C,CACA,IAAAA,GACE,GAAI3U,GAAW8E,KAAK4E,WAAa5E,KAAK2P,WACpC,OAEF,MAAM7P,EAAgB,CACpBA,cAAeE,KAAK4E,UAGtB,IADkBrE,GAAaqB,QAAQ5B,KAAK4E,SAAUkM,GAAchR,GACtDkC,iBAAd,CASA,GANAhC,KAAKwS,gBAMD,iBAAkBntB,SAASC,kBAAoB0a,KAAKoS,QAAQpX,QAzExC,eA0EtB,IAAK,MAAMzb,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAac,GAAG9hB,EAAS,YAAaqc,IAG1CoE,KAAK4E,SAAS6N,QACdzS,KAAK4E,SAASxj
B,aAAa,iBAAiB,GAC5C4e,KAAKqS,MAAMhX,UAAU5E,IAAI0a,IACzBnR,KAAK4E,SAASvJ,UAAU5E,IAAI0a,IAC5B5Q,GAAaqB,QAAQ5B,KAAK4E,SAAUmM,GAAejR,EAhBnD,CAiBF,CACA,IAAA8P,GACE,GAAI1U,GAAW8E,KAAK4E,YAAc5E,KAAK2P,WACrC,OAEF,MAAM7P,EAAgB,CACpBA,cAAeE,KAAK4E,UAEtB5E,KAAK0S,cAAc5S,EACrB,CACA,OAAAiF,GACM/E,KAAKmS,SACPnS,KAAKmS,QAAQnZ,UAEf2L,MAAMI,SACR,CACA,MAAAha,GACEiV,KAAKsS,UAAYtS,KAAKuS,gBAClBvS,KAAKmS,SACPnS,KAAKmS,QAAQpnB,QAEjB,CAGA,aAAA2nB,CAAc5S,GAEZ,IADkBS,GAAaqB,QAAQ5B,KAAK4E,SAAUgM,GAAc9Q,GACtDkC,iBAAd,CAMA,GAAI,iBAAkB3c,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAaC,IAAIjhB,EAAS,YAAaqc,IAGvCoE,KAAKmS,SACPnS,KAAKmS,QAAQnZ,UAEfgH,KAAKqS,MAAMhX,UAAU1B,OAAOwX,IAC5BnR,KAAK4E,SAASvJ,UAAU1B,OAAOwX,IAC/BnR,KAAK4E,SAASxjB,aAAa,gBAAiB,SAC5C4hB,GAAYE,oBAAoBlD,KAAKqS,MAAO,UAC5C9R,GAAaqB,QAAQ5B,KAAK4E,SAAUiM,GAAgB/Q,EAhBpD,CAiBF,CACA,UAAA+D,CAAWC,GAET,GAAgC,iBADhCA,EAASa,MAAMd,WAAWC,IACRxlB,YAA2B,GAAUwlB,EAAOxlB,YAAgE,mBAA3CwlB,EAAOxlB,UAAUgF,sBAElG,MAAM,IAAIkhB,UAAU,GAAG+L,GAAO9L,+GAEhC,OAAOX,CACT,CACA,aAAA0O,GACE,QAAsB,IAAX,EACT,MAAM,IAAIhO,UAAU,gEAEtB,IAAImO,EAAmB3S,KAAK4E,SACG,WAA3B5E,KAAK6E,QAAQvmB,UACfq0B,EAAmB3S,KAAKoS,QACf,GAAUpS,KAAK6E,QAAQvmB,WAChCq0B,EAAmBjY,GAAWsF,KAAK6E,QAAQvmB,WACA,iBAA3B0hB,KAAK6E,QAAQvmB,YAC7Bq0B,EAAmB3S,KAAK6E,QAAQvmB,WAElC,MAAM0zB,EAAehS,KAAK4S,mBAC1B5S,KAAKmS,QAAU,GAAoBQ,EAAkB3S,KAAKqS,MAAOL,EACnE,CACA,QAAArC,GACE,OAAO3P,KAAKqS,MAAMhX,UAAU7W,SAAS2sB,GACvC,CACA,aAAA0B,GACE,MAAMC,EAAiB9S,KAAKoS,QAC5B,GAAIU,EAAezX,UAAU7W,SArKN,WAsKrB,OAAOmtB,GAET,GAAImB,EAAezX,UAAU7W,SAvKJ,aAwKvB,OAAOotB,GAET,GAAIkB,EAAezX,UAAU7W,SAzKA,iBA0K3B,MA5JsB,MA8JxB,GAAIsuB,EAAezX,UAAU7W,SA3KE,mBA4K7B,MA9JyB,SAkK3B,MAAMuuB,EAAkF,QAA1E9tB,iBAAiB+a,KAAKqS,OAAOvX,iBAAiB,iBAAiB6K,OAC7E,OAAImN,EAAezX,UAAU7W,SArLP,UAsLbuuB,EAAQvB,GAAmBD,GAE7BwB,EAAQrB,GAAsBD,EACvC,CACA,aAAAc,GACE,OAAkD,OAA3CvS,KAAK4E,SAAS5J,QAnLD,UAoLtB,CACA,UAAAgY,GACE,MAAM,OACJhrB,GACEgY,KAAK6E,QACT,MAAsB,iBAAX7c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAAS4f,OAAOgQ,SAAS5vB,EAAO,MAEzC,mBAAXqK,EACFirB,GAAcjrB,EAAOirB,EAAYjT,KAAK4E,UAExC5c,CACT,CACA,gBAAA4qB,GACE,MAAMM,EAAwB,CAC5Bx0B,UAAWshB,KAAK6S,gBAChBzc,UAAW,CAAC,CACV9V,KAAM,kBACNmB,QAAS,CACPwM,SAAU+R,KAAK6E,QAAQ5W,WAExB,CACD3N,KAAM,SACNmB,QAAS,CACPuG,OAAQgY,KAAKgT,iBAanB,OAPIhT,KAAKsS,WAAsC,WAAzBtS,KAAK6E,QAAQkN,WACjC/O,GAAYC,iBAAiBjD,KAAKqS,MAAO,SAAU,UACnDa,EAAsB9c,UAAY,CAAC,CACjC9V,KAAM,cACNC,SAAS,KAGN,IACF2yB,KACArW,GAAQmD,KAAK6E,QAAQmN,aAAc,CAACkB,IAE3C,CACA,eAAAC,EAAgB,IACdr2B,EAAG,OACHyP,IAEA,MAAMggB,EAAQ1G,GAAe1T,KAhOF,8DAgO+B6N,KAAKqS,OAAOlsB,QAAO5G,GAAWob,GAAUpb,KAC7FgtB,EAAM7b,QAMXoN,GAAqByO,EAAOhgB,EAAQzP,IAAQ6zB,IAAmBpE,EAAMnL,SAAS7U,IAASkmB,OACzF,CAGA,sBAAOhW,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAO6nB,GAAS5M,oBAAoBtF,KAAM8D,GAChD,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,CACA,iBAAOsP,CAAWhU,GAChB,GA5QuB,IA4QnBA,EAAMwI,QAAgD,UAAfxI,EAAMqB,MA/QnC,QA+QuDrB,EAAMtiB,IACzE,OAEF,MAAMu2B,EAAcxN,GAAe1T,KAAKkf,IACxC,IAAK,MAAM1J,KAAU0L,EAAa,CAChC,MAAMC,EAAUpB,GAAS7M,YAAYsC,GACrC,IAAK2L,IAAyC,IAA9BA,EAAQzO,QAAQiN,UAC9B,SAEF,MAAMyB,EAAenU,EAAMmU,eACrBC,EAAeD,EAAanS,SAASkS,EAAQjB,OACnD,GAAIkB,EAAanS,SAASkS,EAAQ1O,WAA2C,WAA9B0O,EAAQzO,QAAQiN,YAA2B0B,GAA8C,YAA9BF,EAAQzO,QAAQiN,WAA2B0B,EACnJ,SAIF,GAAIF,EAAQjB,MAAM7tB,SAAS4a,EAAM7S,UAA2B,UAAf6S,EAAMqB,MA/RvC,QA+R2DrB,EAAMtiB,KAAqB,qCAAqCuG,KAAK+b,EAAM7S,OAAO0a,UACvJ,SAEF,MAAMnH,EAAgB,CACpBA,cAAewT,EAAQ1O,UAEN,UAAfxF,EAAMqB,OACRX,EAAckH,WAAa5H,GAE7BkU,EAAQZ,cAAc5S,EACxB,CACF,CACA,4BAAO2T,CAAsBrU,GAI3B,MAAMsU,EAAU,kBAAkBrwB,KAAK+b,EAAM7S,OAAO0a,SAC9C0M,EAjTW,WAiTK
vU,EAAMtiB,IACtB82B,EAAkB,CAAClD,GAAgBC,IAAkBvP,SAAShC,EAAMtiB,KAC1E,IAAK82B,IAAoBD,EACvB,OAEF,GAAID,IAAYC,EACd,OAEFvU,EAAMkD,iBAGN,MAAMuR,EAAkB7T,KAAKgG,QAAQoL,IAA0BpR,KAAO6F,GAAeM,KAAKnG,KAAMoR,IAAwB,IAAMvL,GAAehhB,KAAKmb,KAAMoR,IAAwB,IAAMvL,GAAeC,QAAQsL,GAAwBhS,EAAMW,eAAehb,YACpPwF,EAAW2nB,GAAS5M,oBAAoBuO,GAC9C,GAAID,EAIF,OAHAxU,EAAM0U,kBACNvpB,EAASslB,YACTtlB,EAAS4oB,gBAAgB/T,GAGvB7U,EAASolB,aAEXvQ,EAAM0U,kBACNvpB,EAASqlB,OACTiE,EAAgBpB,QAEpB,EAOFlS,GAAac,GAAGhc,SAAU4rB,GAAwBG,GAAwBc,GAASuB,uBACnFlT,GAAac,GAAGhc,SAAU4rB,GAAwBK,GAAeY,GAASuB,uBAC1ElT,GAAac,GAAGhc,SAAU2rB,GAAwBkB,GAASkB,YAC3D7S,GAAac,GAAGhc,SAAU6rB,GAAsBgB,GAASkB,YACzD7S,GAAac,GAAGhc,SAAU2rB,GAAwBI,IAAwB,SAAUhS,GAClFA,EAAMkD,iBACN4P,GAAS5M,oBAAoBtF,MAAM2H,QACrC,IAMAxL,GAAmB+V,IAcnB,MAAM6B,GAAS,WAETC,GAAoB,OACpBC,GAAkB,gBAAgBF,KAClCG,GAAY,CAChBC,UAAW,iBACXC,cAAe,KACfhP,YAAY,EACZzK,WAAW,EAEX0Z,YAAa,QAETC,GAAgB,CACpBH,UAAW,SACXC,cAAe,kBACfhP,WAAY,UACZzK,UAAW,UACX0Z,YAAa,oBAOf,MAAME,WAAiB9Q,GACrB,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAKwU,aAAc,EACnBxU,KAAK4E,SAAW,IAClB,CAGA,kBAAWlB,GACT,OAAOwQ,EACT,CACA,sBAAWvQ,GACT,OAAO2Q,EACT,CACA,eAAW/X,GACT,OAAOwX,EACT,CAGA,IAAAlE,CAAKxT,GACH,IAAK2D,KAAK6E,QAAQlK,UAEhB,YADAkC,GAAQR,GAGV2D,KAAKyU,UACL,MAAMl1B,EAAUygB,KAAK0U,cACjB1U,KAAK6E,QAAQO,YACfvJ,GAAOtc,GAETA,EAAQ8b,UAAU5E,IAAIud,IACtBhU,KAAK2U,mBAAkB,KACrB9X,GAAQR,EAAS,GAErB,CACA,IAAAuT,CAAKvT,GACE2D,KAAK6E,QAAQlK,WAIlBqF,KAAK0U,cAAcrZ,UAAU1B,OAAOqa,IACpChU,KAAK2U,mBAAkB,KACrB3U,KAAK+E,UACLlI,GAAQR,EAAS,KANjBQ,GAAQR,EAQZ,CACA,OAAA0I,GACO/E,KAAKwU,cAGVjU,GAAaC,IAAIR,KAAK4E,SAAUqP,IAChCjU,KAAK4E,SAASjL,SACdqG,KAAKwU,aAAc,EACrB,CAGA,WAAAE,GACE,IAAK1U,KAAK4E,SAAU,CAClB,MAAMgQ,EAAWvvB,SAASwvB,cAAc,OACxCD,EAAST,UAAYnU,KAAK6E,QAAQsP,UAC9BnU,KAAK6E,QAAQO,YACfwP,EAASvZ,UAAU5E,IApFD,QAsFpBuJ,KAAK4E,SAAWgQ,CAClB,CACA,OAAO5U,KAAK4E,QACd,CACA,iBAAAZ,CAAkBF,GAGhB,OADAA,EAAOuQ,YAAc3Z,GAAWoJ,EAAOuQ,aAChCvQ,CACT,CACA,OAAA2Q,GACE,GAAIzU,KAAKwU,YACP,OAEF,MAAMj1B,EAAUygB,KAAK0U,cACrB1U,KAAK6E,QAAQwP,YAAYS,OAAOv1B,GAChCghB,GAAac,GAAG9hB,EAAS00B,IAAiB,KACxCpX,GAAQmD,KAAK6E,QAAQuP,cAAc,IAErCpU,KAAKwU,aAAc,CACrB,CACA,iBAAAG,CAAkBtY,GAChBW,GAAuBX,EAAU2D,KAAK0U,cAAe1U,KAAK6E,QAAQO,WACpE,EAeF,MAEM2P,GAAc,gBACdC,GAAkB,UAAUD,KAC5BE,GAAoB,cAAcF,KAGlCG,GAAmB,WACnBC,GAAY,CAChBC,WAAW,EACXC,YAAa,MAETC,GAAgB,CACpBF,UAAW,UACXC,YAAa,WAOf,MAAME,WAAkB9R,GACtB,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAKwV,WAAY,EACjBxV,KAAKyV,qBAAuB,IAC9B,CAGA,kBAAW/R,GACT,OAAOyR,EACT,CACA,sBAAWxR,GACT,OAAO2R,EACT,CACA,eAAW/Y,GACT,MArCW,WAsCb,CAGA,QAAAmZ,GACM1V,KAAKwV,YAGLxV,KAAK6E,QAAQuQ,WACfpV,KAAK6E,QAAQwQ,YAAY5C,QAE3BlS,GAAaC,IAAInb,SAAU0vB,IAC3BxU,GAAac,GAAGhc,SAAU2vB,IAAiB5V,GAASY,KAAK2V,eAAevW,KACxEmB,GAAac,GAAGhc,SAAU4vB,IAAmB7V,GAASY,KAAK4V,eAAexW,KAC1EY,KAAKwV,WAAY,EACnB,CACA,UAAAK,GACO7V,KAAKwV,YAGVxV,KAAKwV,WAAY,EACjBjV,GAAaC,IAAInb,SAAU0vB,IAC7B,CAGA,cAAAY,CAAevW,GACb,MAAM,YACJiW,GACErV,KAAK6E,QACT,GAAIzF,EAAM7S,SAAWlH,UAAY+Z,EAAM7S,SAAW8oB,GAAeA,EAAY7wB,SAAS4a,EAAM7S,QAC1F,OAEF,MAAM1L,EAAWglB,GAAeU,kBAAkB8O,GAC1B,IAApBx0B,EAAS6P,OACX2kB,EAAY5C,QACHzS,KAAKyV,uBAAyBP,GACvCr0B,EAASA,EAAS6P,OAAS,GAAG+hB,QAE9B5xB,EAAS,GAAG4xB,OAEhB,CACA,cAAAmD,CAAexW,GAzED,QA0ERA,EAAMtiB,MAGVkjB,KAAKyV,qBAAuBrW,EAAM0W,SAAWZ,GA5EzB,UA6EtB,EAeF,MAAMa,GAAyB,oDACzBC,GAA0B,cAC1BC,GAAmB,gBACnBC,GAAkB,eAMxB,MAAMC,GACJ,WAAAhS,GACEnE,KAAK4E,SAAWvf,SAAS6G,IAC3B,CAGA,QAAAkqB,GAEE,MAAMC,EAAgBhxB,SAASC,gBAAgBuC,YAC/C,OAAO1F,KAAKoC,IAAI3E,OAAO02B,WAAaD,EACtC,CACA,IAAAzG,GACE,MAAM/rB,EAAQmc,KAAKoW,WACnBpW,KAAKuW,mBAELvW,KAAKwW,sBAAsBxW,KAAK4E,SAAUqR,IAAkBQ,GAAmBA,EAAkB5
yB,IAEjGmc,KAAKwW,sBAAsBT,GAAwBE,IAAkBQ,GAAmBA,EAAkB5yB,IAC1Gmc,KAAKwW,sBAAsBR,GAAyBE,IAAiBO,GAAmBA,EAAkB5yB,GAC5G,CACA,KAAAwO,GACE2N,KAAK0W,wBAAwB1W,KAAK4E,SAAU,YAC5C5E,KAAK0W,wBAAwB1W,KAAK4E,SAAUqR,IAC5CjW,KAAK0W,wBAAwBX,GAAwBE,IACrDjW,KAAK0W,wBAAwBV,GAAyBE,GACxD,CACA,aAAAS,GACE,OAAO3W,KAAKoW,WAAa,CAC3B,CAGA,gBAAAG,GACEvW,KAAK4W,sBAAsB5W,KAAK4E,SAAU,YAC1C5E,KAAK4E,SAAS7jB,MAAM+K,SAAW,QACjC,CACA,qBAAA0qB,CAAsBzc,EAAU8c,EAAexa,GAC7C,MAAMya,EAAiB9W,KAAKoW,WAS5BpW,KAAK+W,2BAA2Bhd,GARHxa,IAC3B,GAAIA,IAAYygB,KAAK4E,UAAYhlB,OAAO02B,WAAa/2B,EAAQsI,YAAcivB,EACzE,OAEF9W,KAAK4W,sBAAsBr3B,EAASs3B,GACpC,MAAMJ,EAAkB72B,OAAOqF,iBAAiB1F,GAASub,iBAAiB+b,GAC1Et3B,EAAQwB,MAAMi2B,YAAYH,EAAe,GAAGxa,EAASkB,OAAOC,WAAWiZ,QAAsB,GAGjG,CACA,qBAAAG,CAAsBr3B,EAASs3B,GAC7B,MAAMI,EAAc13B,EAAQwB,MAAM+Z,iBAAiB+b,GAC/CI,GACFjU,GAAYC,iBAAiB1jB,EAASs3B,EAAeI,EAEzD,CACA,uBAAAP,CAAwB3c,EAAU8c,GAWhC7W,KAAK+W,2BAA2Bhd,GAVHxa,IAC3B,MAAM5B,EAAQqlB,GAAYQ,iBAAiBjkB,EAASs3B,GAEtC,OAAVl5B,GAIJqlB,GAAYE,oBAAoB3jB,EAASs3B,GACzCt3B,EAAQwB,MAAMi2B,YAAYH,EAAel5B,IAJvC4B,EAAQwB,MAAMm2B,eAAeL,EAIgB,GAGnD,CACA,0BAAAE,CAA2Bhd,EAAUod,GACnC,GAAI,GAAUpd,GACZod,EAASpd,QAGX,IAAK,MAAM6L,KAAOC,GAAe1T,KAAK4H,EAAUiG,KAAK4E,UACnDuS,EAASvR,EAEb,EAeF,MAEMwR,GAAc,YAGdC,GAAe,OAAOD,KACtBE,GAAyB,gBAAgBF,KACzCG,GAAiB,SAASH,KAC1BI,GAAe,OAAOJ,KACtBK,GAAgB,QAAQL,KACxBM,GAAiB,SAASN,KAC1BO,GAAsB,gBAAgBP,KACtCQ,GAA0B,oBAAoBR,KAC9CS,GAA0B,kBAAkBT,KAC5CU,GAAyB,QAAQV,cACjCW,GAAkB,aAElBC,GAAoB,OACpBC,GAAoB,eAKpBC,GAAY,CAChBtD,UAAU,EACVnC,OAAO,EACPzH,UAAU,GAENmN,GAAgB,CACpBvD,SAAU,mBACVnC,MAAO,UACPzH,SAAU,WAOZ,MAAMoN,WAAc1T,GAClB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKqY,QAAUxS,GAAeC,QArBV,gBAqBmC9F,KAAK4E,UAC5D5E,KAAKsY,UAAYtY,KAAKuY,sBACtBvY,KAAKwY,WAAaxY,KAAKyY,uBACvBzY,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EACxBnP,KAAK0Y,WAAa,IAAIvC,GACtBnW,KAAK6L,oBACP,CAGA,kBAAWnI,GACT,OAAOwU,EACT,CACA,sBAAWvU,GACT,OAAOwU,EACT,CACA,eAAW5b,GACT,MA1DW,OA2Db,CAGA,MAAAoL,CAAO7H,GACL,OAAOE,KAAK2P,SAAW3P,KAAK4P,OAAS5P,KAAK6P,KAAK/P,EACjD,CACA,IAAA+P,CAAK/P,GACCE,KAAK2P,UAAY3P,KAAKmP,kBAGR5O,GAAaqB,QAAQ5B,KAAK4E,SAAU4S,GAAc,CAClE1X,kBAEYkC,mBAGdhC,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EACxBnP,KAAK0Y,WAAW9I,OAChBvqB,SAAS6G,KAAKmP,UAAU5E,IAAIshB,IAC5B/X,KAAK2Y,gBACL3Y,KAAKsY,UAAUzI,MAAK,IAAM7P,KAAK4Y,aAAa9Y,KAC9C,CACA,IAAA8P,GACO5P,KAAK2P,WAAY3P,KAAKmP,mBAGT5O,GAAaqB,QAAQ5B,KAAK4E,SAAUyS,IACxCrV,mBAGdhC,KAAK2P,UAAW,EAChB3P,KAAKmP,kBAAmB,EACxBnP,KAAKwY,WAAW3C,aAChB7V,KAAK4E,SAASvJ,UAAU1B,OAAOqe,IAC/BhY,KAAKmF,gBAAe,IAAMnF,KAAK6Y,cAAc7Y,KAAK4E,SAAU5E,KAAKgO,gBACnE,CACA,OAAAjJ,GACExE,GAAaC,IAAI5gB,OAAQw3B,IACzB7W,GAAaC,IAAIR,KAAKqY,QAASjB,IAC/BpX,KAAKsY,UAAUvT,UACf/E,KAAKwY,WAAW3C,aAChBlR,MAAMI,SACR,CACA,YAAA+T,GACE9Y,KAAK2Y,eACP,CAGA,mBAAAJ,GACE,OAAO,IAAIhE,GAAS,CAClB5Z,UAAWmG,QAAQd,KAAK6E,QAAQ+P,UAEhCxP,WAAYpF,KAAKgO,eAErB,CACA,oBAAAyK,GACE,OAAO,IAAIlD,GAAU,CACnBF,YAAarV,KAAK4E,UAEtB,CACA,YAAAgU,CAAa9Y,GAENza,SAAS6G,KAAK1H,SAASwb,KAAK4E,WAC/Bvf,SAAS6G,KAAK4oB,OAAO9U,KAAK4E,UAE5B5E,KAAK4E,SAAS7jB,MAAMgxB,QAAU,QAC9B/R,KAAK4E,SAASzjB,gBAAgB,eAC9B6e,KAAK4E,SAASxjB,aAAa,cAAc,GACzC4e,KAAK4E,SAASxjB,aAAa,OAAQ,UACnC4e,KAAK4E,SAASnZ,UAAY,EAC1B,MAAMstB,EAAYlT,GAAeC,QA7GT,cA6GsC9F,KAAKqY,SAC/DU,IACFA,EAAUttB,UAAY,GAExBoQ,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIuhB,IAU5BhY,KAAKmF,gBATsB,KACrBnF,KAAK6E,QAAQ4N,OACfzS,KAAKwY,WAAW9C,WAElB1V,KAAKmP,kBAAmB,EACxB5O,GAAaqB,QAAQ5B,KAAK4E,SAAU6S,GAAe,CACjD3X,iBACA,GAEoCE,KAAKqY,QAASrY,KAAKgO,cAC7D,CACA,kBAAAnC,GACEtL,GAAac,GAAGrB,KAAK4E,SAAUiT,IAAyBzY,IAhJvC,WAiJXA,EAAMtiB,MAGNkjB,KAAK6E,QAAQmG,SACfhL,KAAK4P,OAGP5P,KAAKgZ,6BAA4B,IAEnCzY,GAAac,GAAG
zhB,OAAQ83B,IAAgB,KAClC1X,KAAK2P,WAAa3P,KAAKmP,kBACzBnP,KAAK2Y,eACP,IAEFpY,GAAac,GAAGrB,KAAK4E,SAAUgT,IAAyBxY,IAEtDmB,GAAae,IAAItB,KAAK4E,SAAU+S,IAAqBsB,IAC/CjZ,KAAK4E,WAAaxF,EAAM7S,QAAUyT,KAAK4E,WAAaqU,EAAO1sB,SAGjC,WAA1ByT,KAAK6E,QAAQ+P,SAIb5U,KAAK6E,QAAQ+P,UACf5U,KAAK4P,OAJL5P,KAAKgZ,6BAKP,GACA,GAEN,CACA,UAAAH,GACE7Y,KAAK4E,SAAS7jB,MAAMgxB,QAAU,OAC9B/R,KAAK4E,SAASxjB,aAAa,eAAe,GAC1C4e,KAAK4E,SAASzjB,gBAAgB,cAC9B6e,KAAK4E,SAASzjB,gBAAgB,QAC9B6e,KAAKmP,kBAAmB,EACxBnP,KAAKsY,UAAU1I,MAAK,KAClBvqB,SAAS6G,KAAKmP,UAAU1B,OAAOoe,IAC/B/X,KAAKkZ,oBACLlZ,KAAK0Y,WAAWrmB,QAChBkO,GAAaqB,QAAQ5B,KAAK4E,SAAU2S,GAAe,GAEvD,CACA,WAAAvJ,GACE,OAAOhO,KAAK4E,SAASvJ,UAAU7W,SAjLT,OAkLxB,CACA,0BAAAw0B,GAEE,GADkBzY,GAAaqB,QAAQ5B,KAAK4E,SAAU0S,IACxCtV,iBACZ,OAEF,MAAMmX,EAAqBnZ,KAAK4E,SAASvX,aAAehI,SAASC,gBAAgBsC,aAC3EwxB,EAAmBpZ,KAAK4E,SAAS7jB,MAAMiL,UAEpB,WAArBotB,GAAiCpZ,KAAK4E,SAASvJ,UAAU7W,SAASyzB,MAGjEkB,IACHnZ,KAAK4E,SAAS7jB,MAAMiL,UAAY,UAElCgU,KAAK4E,SAASvJ,UAAU5E,IAAIwhB,IAC5BjY,KAAKmF,gBAAe,KAClBnF,KAAK4E,SAASvJ,UAAU1B,OAAOse,IAC/BjY,KAAKmF,gBAAe,KAClBnF,KAAK4E,SAAS7jB,MAAMiL,UAAYotB,CAAgB,GAC/CpZ,KAAKqY,QAAQ,GACfrY,KAAKqY,SACRrY,KAAK4E,SAAS6N,QAChB,CAMA,aAAAkG,GACE,MAAMQ,EAAqBnZ,KAAK4E,SAASvX,aAAehI,SAASC,gBAAgBsC,aAC3EkvB,EAAiB9W,KAAK0Y,WAAWtC,WACjCiD,EAAoBvC,EAAiB,EAC3C,GAAIuC,IAAsBF,EAAoB,CAC5C,MAAMr3B,EAAWma,KAAU,cAAgB,eAC3C+D,KAAK4E,SAAS7jB,MAAMe,GAAY,GAAGg1B,KACrC,CACA,IAAKuC,GAAqBF,EAAoB,CAC5C,MAAMr3B,EAAWma,KAAU,eAAiB,cAC5C+D,KAAK4E,SAAS7jB,MAAMe,GAAY,GAAGg1B,KACrC,CACF,CACA,iBAAAoC,GACElZ,KAAK4E,SAAS7jB,MAAMu4B,YAAc,GAClCtZ,KAAK4E,SAAS7jB,MAAMw4B,aAAe,EACrC,CAGA,sBAAO9c,CAAgBqH,EAAQhE,GAC7B,OAAOE,KAAKwH,MAAK,WACf,MAAMnd,EAAO+tB,GAAM9S,oBAAoBtF,KAAM8D,GAC7C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQhE,EAJb,CAKF,GACF,EAOFS,GAAac,GAAGhc,SAAUyyB,GA9OK,4BA8O2C,SAAU1Y,GAClF,MAAM7S,EAASsZ,GAAec,uBAAuB3G,MACjD,CAAC,IAAK,QAAQoB,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAER/B,GAAae,IAAI/U,EAAQirB,IAAcgC,IACjCA,EAAUxX,kBAIdzB,GAAae,IAAI/U,EAAQgrB,IAAgB,KACnC5c,GAAUqF,OACZA,KAAKyS,OACP,GACA,IAIJ,MAAMgH,EAAc5T,GAAeC,QAnQb,eAoQlB2T,GACFrB,GAAM/S,YAAYoU,GAAa7J,OAEpBwI,GAAM9S,oBAAoB/Y,GAClCob,OAAO3H,KACd,IACA6G,GAAqBuR,IAMrBjc,GAAmBic,IAcnB,MAEMsB,GAAc,gBACdC,GAAiB,YACjBC,GAAwB,OAAOF,KAAcC,KAE7CE,GAAoB,OACpBC,GAAuB,UACvBC,GAAoB,SAEpBC,GAAgB,kBAChBC,GAAe,OAAOP,KACtBQ,GAAgB,QAAQR,KACxBS,GAAe,OAAOT,KACtBU,GAAuB,gBAAgBV,KACvCW,GAAiB,SAASX,KAC1BY,GAAe,SAASZ,KACxBa,GAAyB,QAAQb,KAAcC,KAC/Ca,GAAwB,kBAAkBd,KAE1Ce,GAAY,CAChB7F,UAAU,EACV5J,UAAU,EACVvgB,QAAQ,GAEJiwB,GAAgB,CACpB9F,SAAU,mBACV5J,SAAU,UACVvgB,OAAQ,WAOV,MAAMkwB,WAAkBjW,GACtB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAK2P,UAAW,EAChB3P,KAAKsY,UAAYtY,KAAKuY,sBACtBvY,KAAKwY,WAAaxY,KAAKyY,uBACvBzY,KAAK6L,oBACP,CAGA,kBAAWnI,GACT,OAAO+W,EACT,CACA,sBAAW9W,GACT,OAAO+W,EACT,CACA,eAAWne,GACT,MApDW,WAqDb,CAGA,MAAAoL,CAAO7H,GACL,OAAOE,KAAK2P,SAAW3P,KAAK4P,OAAS5P,KAAK6P,KAAK/P,EACjD,CACA,IAAA+P,CAAK/P,GACCE,KAAK2P,UAGSpP,GAAaqB,QAAQ5B,KAAK4E,SAAUqV,GAAc,CAClEna,kBAEYkC,mBAGdhC,KAAK2P,UAAW,EAChB3P,KAAKsY,UAAUzI,OACV7P,KAAK6E,QAAQpa,SAChB,IAAI0rB,IAAkBvG,OAExB5P,KAAK4E,SAASxjB,aAAa,cAAc,GACzC4e,KAAK4E,SAASxjB,aAAa,OAAQ,UACnC4e,KAAK4E,SAASvJ,UAAU5E,IAAIqjB,IAW5B9Z,KAAKmF,gBAVoB,KAClBnF,KAAK6E,QAAQpa,SAAUuV,KAAK6E,QAAQ+P,UACvC5U,KAAKwY,WAAW9C,WAElB1V,KAAK4E,SAASvJ,UAAU5E,IAAIojB,IAC5B7Z,KAAK4E,SAASvJ,UAAU1B,OAAOmgB,IAC/BvZ,GAAaqB,QAAQ5B,KAAK4E,SAAUsV,GAAe,CACjDpa,iBACA,GAEkCE,KAAK4E,UAAU,GACvD,CACA,IAAAgL,GACO5P,KAAK2P,WAGQpP,GAAaqB,QAAQ5B,KAAK4E,SAAUuV,IACxCnY,mBAGdhC,KAAKwY,WAAW3C,aAChB7V,KAAK4E,SAASgW,OACd5a,KAAK2P,UAAW,EAChB3P,
KAAK4E,SAASvJ,UAAU5E,IAAIsjB,IAC5B/Z,KAAKsY,UAAU1I,OAUf5P,KAAKmF,gBAToB,KACvBnF,KAAK4E,SAASvJ,UAAU1B,OAAOkgB,GAAmBE,IAClD/Z,KAAK4E,SAASzjB,gBAAgB,cAC9B6e,KAAK4E,SAASzjB,gBAAgB,QACzB6e,KAAK6E,QAAQpa,SAChB,IAAI0rB,IAAkB9jB,QAExBkO,GAAaqB,QAAQ5B,KAAK4E,SAAUyV,GAAe,GAEfra,KAAK4E,UAAU,IACvD,CACA,OAAAG,GACE/E,KAAKsY,UAAUvT,UACf/E,KAAKwY,WAAW3C,aAChBlR,MAAMI,SACR,CAGA,mBAAAwT,GACE,MASM5d,EAAYmG,QAAQd,KAAK6E,QAAQ+P,UACvC,OAAO,IAAIL,GAAS,CAClBJ,UA3HsB,qBA4HtBxZ,YACAyK,YAAY,EACZiP,YAAarU,KAAK4E,SAAS7f,WAC3BqvB,cAAezZ,EAfK,KACU,WAA1BqF,KAAK6E,QAAQ+P,SAIjB5U,KAAK4P,OAHHrP,GAAaqB,QAAQ5B,KAAK4E,SAAUwV,GAG3B,EAUgC,MAE/C,CACA,oBAAA3B,GACE,OAAO,IAAIlD,GAAU,CACnBF,YAAarV,KAAK4E,UAEtB,CACA,kBAAAiH,GACEtL,GAAac,GAAGrB,KAAK4E,SAAU4V,IAAuBpb,IA5IvC,WA6ITA,EAAMtiB,MAGNkjB,KAAK6E,QAAQmG,SACfhL,KAAK4P,OAGPrP,GAAaqB,QAAQ5B,KAAK4E,SAAUwV,IAAqB,GAE7D,CAGA,sBAAO3d,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOswB,GAAUrV,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KAJb,CAKF,GACF,EAOFO,GAAac,GAAGhc,SAAUk1B,GA7JK,gCA6J2C,SAAUnb,GAClF,MAAM7S,EAASsZ,GAAec,uBAAuB3G,MAIrD,GAHI,CAAC,IAAK,QAAQoB,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAEJpH,GAAW8E,MACb,OAEFO,GAAae,IAAI/U,EAAQ8tB,IAAgB,KAEnC1f,GAAUqF,OACZA,KAAKyS,OACP,IAIF,MAAMgH,EAAc5T,GAAeC,QAAQkU,IACvCP,GAAeA,IAAgBltB,GACjCouB,GAAUtV,YAAYoU,GAAa7J,OAExB+K,GAAUrV,oBAAoB/Y,GACtCob,OAAO3H,KACd,IACAO,GAAac,GAAGzhB,OAAQg6B,IAAuB,KAC7C,IAAK,MAAM7f,KAAY8L,GAAe1T,KAAK6nB,IACzCW,GAAUrV,oBAAoBvL,GAAU8V,MAC1C,IAEFtP,GAAac,GAAGzhB,OAAQ06B,IAAc,KACpC,IAAK,MAAM/6B,KAAWsmB,GAAe1T,KAAK,gDACG,UAAvClN,iBAAiB1F,GAASiC,UAC5Bm5B,GAAUrV,oBAAoB/lB,GAASqwB,MAE3C,IAEF/I,GAAqB8T,IAMrBxe,GAAmBwe,IAUnB,MACME,GAAmB,CAEvB,IAAK,CAAC,QAAS,MAAO,KAAM,OAAQ,OAHP,kBAI7BhqB,EAAG,CAAC,SAAU,OAAQ,QAAS,OAC/BiqB,KAAM,GACNhqB,EAAG,GACHiqB,GAAI,GACJC,IAAK,GACLC,KAAM,GACNC,GAAI,GACJC,IAAK,GACLC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJxqB,EAAG,GACH0b,IAAK,CAAC,MAAO,SAAU,MAAO,QAAS,QAAS,UAChD+O,GAAI,GACJC,GAAI,GACJC,EAAG,GACHC,IAAK,GACLC,EAAG,GACHC,MAAO,GACPC,KAAM,GACNC,IAAK,GACLC,IAAK,GACLC,OAAQ,GACRC,EAAG,GACHC,GAAI,IAIAC,GAAgB,IAAIpmB,IAAI,CAAC,aAAc,OAAQ,OAAQ,WAAY,WAAY,SAAU,MAAO,eAShGqmB,GAAmB,0DACnBC,GAAmB,CAAC76B,EAAW86B,KACnC,MAAMC,EAAgB/6B,EAAUvC,SAASC,cACzC,OAAIo9B,EAAqBzb,SAAS0b,IAC5BJ,GAAc/lB,IAAImmB,IACbhc,QAAQ6b,GAAiBt5B,KAAKtB,EAAUg7B,YAM5CF,EAAqB12B,QAAO62B,GAAkBA,aAA0BzY,SAAQ9R,MAAKwqB,GAASA,EAAM55B,KAAKy5B,IAAe,EA0C3HI,GAAY,CAChBC,UAAWtC,GACXuC,QAAS,CAAC,EAEVC,WAAY,GACZxwB,MAAM,EACNywB,UAAU,EACVC,WAAY,KACZC,SAAU,eAENC,GAAgB,CACpBN,UAAW,SACXC,QAAS,SACTC,WAAY,oBACZxwB,KAAM,UACNywB,SAAU,UACVC,WAAY,kBACZC,SAAU,UAENE,GAAqB,CACzBC,MAAO,iCACP5jB,SAAU,oBAOZ,MAAM6jB,WAAwBna,GAC5B,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,EACjC,CAGA,kBAAWJ,GACT,OAAOwZ,EACT,CACA,sBAAWvZ,GACT,OAAO8Z,EACT,CACA,eAAWlhB,GACT,MA3CW,iBA4Cb,CAGA,UAAAshB,GACE,OAAO7gC,OAAOmiB,OAAOa,KAAK6E,QAAQuY,SAASt6B,KAAIghB,GAAU9D,KAAK8d,yBAAyBha,KAAS3d,OAAO2a,QACzG,CACA,UAAAid,GACE,OAAO/d,KAAK6d,aAAantB,OAAS,CACpC,CACA,aAAAstB,CAAcZ,GAMZ,OALApd,KAAKie,cAAcb,GACnBpd,KAAK6E,QAAQuY,QAAU,IAClBpd,KAAK6E,QAAQuY,WACbA,GAEEpd,IACT,CACA,MAAAke,GACE,MAAMC,EAAkB94B,SAASwvB,cAAc,OAC/CsJ,EAAgBC,UAAYpe,KAAKqe,eAAere,KAAK6E,QAAQ2Y,UAC7D,IAAK,MAAOzjB,EAAUukB,KAASthC,OAAOmkB,QAAQnB,KAAK6E,QAAQuY,SACzDpd,KAAKue,YAAYJ,EAAiBG,EAAMvkB,GAE1C,MAAMyjB,EAAWW,EAAgBpY,SAAS,GACpCsX,EAAard,KAAK8d,yBAAyB9d,KAAK6E,QAAQwY,YAI9D,OAHIA,GACFG,EAASniB,UAAU5E,OAAO4mB,EAAWn7B,MAAM,MAEtCs7B,CACT,CAGA,gBAAAvZ,CAAiBH,G
ACfa,MAAMV,iBAAiBH,GACvB9D,KAAKie,cAAcna,EAAOsZ,QAC5B,CACA,aAAAa,CAAcO,GACZ,IAAK,MAAOzkB,EAAUqjB,KAAYpgC,OAAOmkB,QAAQqd,GAC/C7Z,MAAMV,iBAAiB,CACrBlK,WACA4jB,MAAOP,GACNM,GAEP,CACA,WAAAa,CAAYf,EAAUJ,EAASrjB,GAC7B,MAAM0kB,EAAkB5Y,GAAeC,QAAQ/L,EAAUyjB,GACpDiB,KAGLrB,EAAUpd,KAAK8d,yBAAyBV,IAKpC,GAAUA,GACZpd,KAAK0e,sBAAsBhkB,GAAW0iB,GAAUqB,GAG9Cze,KAAK6E,QAAQhY,KACf4xB,EAAgBL,UAAYpe,KAAKqe,eAAejB,GAGlDqB,EAAgBE,YAAcvB,EAX5BqB,EAAgB9kB,SAYpB,CACA,cAAA0kB,CAAeG,GACb,OAAOxe,KAAK6E,QAAQyY,SApJxB,SAAsBsB,EAAYzB,EAAW0B,GAC3C,IAAKD,EAAWluB,OACd,OAAOkuB,EAET,GAAIC,GAAgD,mBAArBA,EAC7B,OAAOA,EAAiBD,GAE1B,MACME,GADY,IAAIl/B,OAAOm/B,WACKC,gBAAgBJ,EAAY,aACxD/9B,EAAW,GAAGlC,UAAUmgC,EAAgB5yB,KAAKkU,iBAAiB,MACpE,IAAK,MAAM7gB,KAAWsB,EAAU,CAC9B,MAAMo+B,EAAc1/B,EAAQC,SAASC,cACrC,IAAKzC,OAAO4D,KAAKu8B,GAAW/b,SAAS6d,GAAc,CACjD1/B,EAAQoa,SACR,QACF,CACA,MAAMulB,EAAgB,GAAGvgC,UAAUY,EAAQ0B,YACrCk+B,EAAoB,GAAGxgC,OAAOw+B,EAAU,MAAQ,GAAIA,EAAU8B,IAAgB,IACpF,IAAK,MAAMl9B,KAAam9B,EACjBtC,GAAiB76B,EAAWo9B,IAC/B5/B,EAAQ4B,gBAAgBY,EAAUvC,SAGxC,CACA,OAAOs/B,EAAgB5yB,KAAKkyB,SAC9B,CA2HmCgB,CAAaZ,EAAKxe,KAAK6E,QAAQsY,UAAWnd,KAAK6E,QAAQ0Y,YAAciB,CACtG,CACA,wBAAAV,CAAyBU,GACvB,OAAO3hB,GAAQ2hB,EAAK,CAACxe,MACvB,CACA,qBAAA0e,CAAsBn/B,EAASk/B,GAC7B,GAAIze,KAAK6E,QAAQhY,KAGf,OAFA4xB,EAAgBL,UAAY,QAC5BK,EAAgB3J,OAAOv1B,GAGzBk/B,EAAgBE,YAAcp/B,EAAQo/B,WACxC,EAeF,MACMU,GAAwB,IAAI/oB,IAAI,CAAC,WAAY,YAAa,eAC1DgpB,GAAoB,OAEpBC,GAAoB,OACpBC,GAAyB,iBACzBC,GAAiB,SACjBC,GAAmB,gBACnBC,GAAgB,QAChBC,GAAgB,QAahBC,GAAgB,CACpBC,KAAM,OACNC,IAAK,MACLC,MAAO/jB,KAAU,OAAS,QAC1BgkB,OAAQ,SACRC,KAAMjkB,KAAU,QAAU,QAEtBkkB,GAAY,CAChBhD,UAAWtC,GACXuF,WAAW,EACXnyB,SAAU,kBACVoyB,WAAW,EACXC,YAAa,GACbC,MAAO,EACPvwB,mBAAoB,CAAC,MAAO,QAAS,SAAU,QAC/CnD,MAAM,EACN7E,OAAQ,CAAC,EAAG,GACZtJ,UAAW,MACXszB,aAAc,KACdsL,UAAU,EACVC,WAAY,KACZxjB,UAAU,EACVyjB,SAAU,+GACVgD,MAAO,GACP5e,QAAS,eAEL6e,GAAgB,CACpBtD,UAAW,SACXiD,UAAW,UACXnyB,SAAU,mBACVoyB,UAAW,2BACXC,YAAa,oBACbC,MAAO,kBACPvwB,mBAAoB,QACpBnD,KAAM,UACN7E,OAAQ,0BACRtJ,UAAW,oBACXszB,aAAc,yBACdsL,SAAU,UACVC,WAAY,kBACZxjB,SAAU,mBACVyjB,SAAU,SACVgD,MAAO,4BACP5e,QAAS,UAOX,MAAM8e,WAAgBhc,GACpB,WAAAP,CAAY5kB,EAASukB,GACnB,QAAsB,IAAX,EACT,MAAM,IAAIU,UAAU,+DAEtBG,MAAMplB,EAASukB,GAGf9D,KAAK2gB,YAAa,EAClB3gB,KAAK4gB,SAAW,EAChB5gB,KAAK6gB,WAAa,KAClB7gB,KAAK8gB,eAAiB,CAAC,EACvB9gB,KAAKmS,QAAU,KACfnS,KAAK+gB,iBAAmB,KACxB/gB,KAAKghB,YAAc,KAGnBhhB,KAAKihB,IAAM,KACXjhB,KAAKkhB,gBACAlhB,KAAK6E,QAAQ9K,UAChBiG,KAAKmhB,WAET,CAGA,kBAAWzd,GACT,OAAOyc,EACT,CACA,sBAAWxc,GACT,OAAO8c,EACT,CACA,eAAWlkB,GACT,MAxGW,SAyGb,CAGA,MAAA6kB,GACEphB,KAAK2gB,YAAa,CACpB,CACA,OAAAU,GACErhB,KAAK2gB,YAAa,CACpB,CACA,aAAAW,GACEthB,KAAK2gB,YAAc3gB,KAAK2gB,UAC1B,CACA,MAAAhZ,GACO3H,KAAK2gB,aAGV3gB,KAAK8gB,eAAeS,OAASvhB,KAAK8gB,eAAeS,MAC7CvhB,KAAK2P,WACP3P,KAAKwhB,SAGPxhB,KAAKyhB,SACP,CACA,OAAA1c,GACEmI,aAAalN,KAAK4gB,UAClBrgB,GAAaC,IAAIR,KAAK4E,SAAS5J,QAAQykB,IAAiBC,GAAkB1f,KAAK0hB,mBAC3E1hB,KAAK4E,SAASpJ,aAAa,2BAC7BwE,KAAK4E,SAASxjB,aAAa,QAAS4e,KAAK4E,SAASpJ,aAAa,2BAEjEwE,KAAK2hB,iBACLhd,MAAMI,SACR,CACA,IAAA8K,GACE,GAAoC,SAAhC7P,KAAK4E,SAAS7jB,MAAMgxB,QACtB,MAAM,IAAInO,MAAM,uCAElB,IAAM5D,KAAK4hB,mBAAoB5hB,KAAK2gB,WAClC,OAEF,MAAMnH,EAAYjZ,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAlItD,SAoIXqc,GADapmB,GAAeuE,KAAK4E,WACL5E,KAAK4E,SAAS9kB,cAAcwF,iBAAiBd,SAASwb,KAAK4E,UAC7F,GAAI4U,EAAUxX,mBAAqB6f,EACjC,OAIF7hB,KAAK2hB,iBACL,MAAMV,EAAMjhB,KAAK8hB,iBACjB9hB,KAAK4E,SAASxjB,aAAa,mBAAoB6/B,EAAIzlB,aAAa,OAChE,MAAM,UACJ6kB,GACErgB,KAAK6E,QAYT,GAXK7E,KAAK4E,SAAS9kB,cAAcwF,gBAAgBd,SAASwb,KAAKihB,OAC7DZ,EAAUvL,OAAOmM,GACjB1gB,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhJpC,cAkJnBxF,KAA
KmS,QAAUnS,KAAKwS,cAAcyO,GAClCA,EAAI5lB,UAAU5E,IAAI8oB,IAMd,iBAAkBl6B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAac,GAAG9hB,EAAS,YAAaqc,IAU1CoE,KAAKmF,gBAPY,KACf5E,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhKrC,WAiKQ,IAApBxF,KAAK6gB,YACP7gB,KAAKwhB,SAEPxhB,KAAK6gB,YAAa,CAAK,GAEK7gB,KAAKihB,IAAKjhB,KAAKgO,cAC/C,CACA,IAAA4B,GACE,GAAK5P,KAAK2P,aAGQpP,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UA/KtD,SAgLHxD,iBAAd,CAQA,GALYhC,KAAK8hB,iBACbzmB,UAAU1B,OAAO4lB,IAIjB,iBAAkBl6B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK6Z,UAC/CxF,GAAaC,IAAIjhB,EAAS,YAAaqc,IAG3CoE,KAAK8gB,eAA4B,OAAI,EACrC9gB,KAAK8gB,eAAelB,KAAiB,EACrC5f,KAAK8gB,eAAenB,KAAiB,EACrC3f,KAAK6gB,WAAa,KAYlB7gB,KAAKmF,gBAVY,KACXnF,KAAK+hB,yBAGJ/hB,KAAK6gB,YACR7gB,KAAK2hB,iBAEP3hB,KAAK4E,SAASzjB,gBAAgB,oBAC9Bof,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAzMpC,WAyM8D,GAEnDxF,KAAKihB,IAAKjhB,KAAKgO,cA1B7C,CA2BF,CACA,MAAAjjB,GACMiV,KAAKmS,SACPnS,KAAKmS,QAAQpnB,QAEjB,CAGA,cAAA62B,GACE,OAAO9gB,QAAQd,KAAKgiB,YACtB,CACA,cAAAF,GAIE,OAHK9hB,KAAKihB,MACRjhB,KAAKihB,IAAMjhB,KAAKiiB,kBAAkBjiB,KAAKghB,aAAehhB,KAAKkiB,2BAEtDliB,KAAKihB,GACd,CACA,iBAAAgB,CAAkB7E,GAChB,MAAM6D,EAAMjhB,KAAKmiB,oBAAoB/E,GAASc,SAG9C,IAAK+C,EACH,OAAO,KAETA,EAAI5lB,UAAU1B,OAAO2lB,GAAmBC,IAExC0B,EAAI5lB,UAAU5E,IAAI,MAAMuJ,KAAKmE,YAAY5H,aACzC,MAAM6lB,EAvuGKC,KACb,GACEA,GAAUlgC,KAAKmgC,MA/BH,IA+BSngC,KAAKogC,gBACnBl9B,SAASm9B,eAAeH,IACjC,OAAOA,CAAM,EAmuGGI,CAAOziB,KAAKmE,YAAY5H,MAAM1c,WAK5C,OAJAohC,EAAI7/B,aAAa,KAAMghC,GACnBpiB,KAAKgO,eACPiT,EAAI5lB,UAAU5E,IAAI6oB,IAEb2B,CACT,CACA,UAAAyB,CAAWtF,GACTpd,KAAKghB,YAAc5D,EACfpd,KAAK2P,aACP3P,KAAK2hB,iBACL3hB,KAAK6P,OAET,CACA,mBAAAsS,CAAoB/E,GAYlB,OAXIpd,KAAK+gB,iBACP/gB,KAAK+gB,iBAAiB/C,cAAcZ,GAEpCpd,KAAK+gB,iBAAmB,IAAInD,GAAgB,IACvC5d,KAAK6E,QAGRuY,UACAC,WAAYrd,KAAK8d,yBAAyB9d,KAAK6E,QAAQyb,eAGpDtgB,KAAK+gB,gBACd,CACA,sBAAAmB,GACE,MAAO,CACL,CAAC1C,IAAyBxf,KAAKgiB,YAEnC,CACA,SAAAA,GACE,OAAOhiB,KAAK8d,yBAAyB9d,KAAK6E,QAAQ2b,QAAUxgB,KAAK4E,SAASpJ,aAAa,yBACzF,CAGA,4BAAAmnB,CAA6BvjB,GAC3B,OAAOY,KAAKmE,YAAYmB,oBAAoBlG,EAAMW,eAAgBC,KAAK4iB,qBACzE,CACA,WAAA5U,GACE,OAAOhO,KAAK6E,QAAQub,WAAapgB,KAAKihB,KAAOjhB,KAAKihB,IAAI5lB,UAAU7W,SAAS86B,GAC3E,CACA,QAAA3P,GACE,OAAO3P,KAAKihB,KAAOjhB,KAAKihB,IAAI5lB,UAAU7W,SAAS+6B,GACjD,CACA,aAAA/M,CAAcyO,GACZ,MAAMviC,EAAYme,GAAQmD,KAAK6E,QAAQnmB,UAAW,CAACshB,KAAMihB,EAAKjhB,KAAK4E,WAC7Die,EAAahD,GAAcnhC,EAAU+lB,eAC3C,OAAO,GAAoBzE,KAAK4E,SAAUqc,EAAKjhB,KAAK4S,iBAAiBiQ,GACvE,CACA,UAAA7P,GACE,MAAM,OACJhrB,GACEgY,KAAK6E,QACT,MAAsB,iBAAX7c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAAS4f,OAAOgQ,SAAS5vB,EAAO,MAEzC,mBAAXqK,EACFirB,GAAcjrB,EAAOirB,EAAYjT,KAAK4E,UAExC5c,CACT,CACA,wBAAA81B,CAAyBU,GACvB,OAAO3hB,GAAQ2hB,EAAK,CAACxe,KAAK4E,UAC5B,CACA,gBAAAgO,CAAiBiQ,GACf,MAAM3P,EAAwB,CAC5Bx0B,UAAWmkC,EACXzsB,UAAW,CAAC,CACV9V,KAAM,OACNmB,QAAS,CACPuO,mBAAoBgQ,KAAK6E,QAAQ7U,qBAElC,CACD1P,KAAM,SACNmB,QAAS,CACPuG,OAAQgY,KAAKgT,eAEd,CACD1yB,KAAM,kBACNmB,QAAS,CACPwM,SAAU+R,KAAK6E,QAAQ5W,WAExB,CACD3N,KAAM,QACNmB,QAAS,CACPlC,QAAS,IAAIygB,KAAKmE,YAAY5H,eAE/B,CACDjc,KAAM,kBACNC,SAAS,EACTC,MAAO,aACPC,GAAI4J,IAGF2V,KAAK8hB,iBAAiB1gC,aAAa,wBAAyBiJ,EAAK1J,MAAMjC,UAAU,KAIvF,MAAO,IACFw0B,KACArW,GAAQmD,KAAK6E,QAAQmN,aAAc,CAACkB,IAE3C,CACA,aAAAgO,GACE,MAAM4B,EAAW9iB,KAAK6E,QAAQjD,QAAQ1f,MAAM,KAC5C,IAAK,MAAM0f,KAAWkhB,EACpB,GAAgB,UAAZlhB,EACFrB,GAAac,GAAGrB,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAjVlC,SAiV4DxF,KAAK6E,QAAQ9K,UAAUqF,IAC/EY,KAAK2iB,6BAA6BvjB,GAC1CuI,QAAQ,SAEb,GA3VU,WA2VN/F,EAA4B,CACrC,MAAMmhB,EAAUnhB,IAAY+d,GAAgB3f,KAAKmE,YAAYqB,UAnV5C,cAmV0ExF,KAAKmE,YAAYqB,UArV5F,WAsVVwd,EAAWphB,IAAY+d,GAAgB3f,KAAKmE,YAAYqB,UAn
V7C,cAmV2ExF,KAAKmE,YAAYqB,UArV5F,YAsVjBjF,GAAac,GAAGrB,KAAK4E,SAAUme,EAAS/iB,KAAK6E,QAAQ9K,UAAUqF,IAC7D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAClDkU,EAAQwN,eAA8B,YAAf1hB,EAAMqB,KAAqBmf,GAAgBD,KAAiB,EACnFrM,EAAQmO,QAAQ,IAElBlhB,GAAac,GAAGrB,KAAK4E,SAAUoe,EAAUhjB,KAAK6E,QAAQ9K,UAAUqF,IAC9D,MAAMkU,EAAUtT,KAAK2iB,6BAA6BvjB,GAClDkU,EAAQwN,eAA8B,aAAf1hB,EAAMqB,KAAsBmf,GAAgBD,IAAiBrM,EAAQ1O,SAASpgB,SAAS4a,EAAMU,eACpHwT,EAAQkO,QAAQ,GAEpB,CAEFxhB,KAAK0hB,kBAAoB,KACnB1hB,KAAK4E,UACP5E,KAAK4P,MACP,EAEFrP,GAAac,GAAGrB,KAAK4E,SAAS5J,QAAQykB,IAAiBC,GAAkB1f,KAAK0hB,kBAChF,CACA,SAAAP,GACE,MAAMX,EAAQxgB,KAAK4E,SAASpJ,aAAa,SACpCglB,IAGAxgB,KAAK4E,SAASpJ,aAAa,eAAkBwE,KAAK4E,SAAS+Z,YAAYhZ,QAC1E3F,KAAK4E,SAASxjB,aAAa,aAAco/B,GAE3CxgB,KAAK4E,SAASxjB,aAAa,yBAA0Bo/B,GACrDxgB,KAAK4E,SAASzjB,gBAAgB,SAChC,CACA,MAAAsgC,GACMzhB,KAAK2P,YAAc3P,KAAK6gB,WAC1B7gB,KAAK6gB,YAAa,GAGpB7gB,KAAK6gB,YAAa,EAClB7gB,KAAKijB,aAAY,KACXjjB,KAAK6gB,YACP7gB,KAAK6P,MACP,GACC7P,KAAK6E,QAAQ0b,MAAM1Q,MACxB,CACA,MAAA2R,GACMxhB,KAAK+hB,yBAGT/hB,KAAK6gB,YAAa,EAClB7gB,KAAKijB,aAAY,KACVjjB,KAAK6gB,YACR7gB,KAAK4P,MACP,GACC5P,KAAK6E,QAAQ0b,MAAM3Q,MACxB,CACA,WAAAqT,CAAYrlB,EAASslB,GACnBhW,aAAalN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW/iB,WAAWD,EAASslB,EACtC,CACA,oBAAAnB,GACE,OAAO/kC,OAAOmiB,OAAOa,KAAK8gB,gBAAgB1f,UAAS,EACrD,CACA,UAAAyC,CAAWC,GACT,MAAMqf,EAAiBngB,GAAYG,kBAAkBnD,KAAK4E,UAC1D,IAAK,MAAMwe,KAAiBpmC,OAAO4D,KAAKuiC,GAClC9D,GAAsB1oB,IAAIysB,WACrBD,EAAeC,GAU1B,OAPAtf,EAAS,IACJqf,KACmB,iBAAXrf,GAAuBA,EAASA,EAAS,CAAC,GAEvDA,EAAS9D,KAAK+D,gBAAgBD,GAC9BA,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CACA,iBAAAE,CAAkBF,GAchB,OAbAA,EAAOuc,WAAiC,IAArBvc,EAAOuc,UAAsBh7B,SAAS6G,KAAOwO,GAAWoJ,EAAOuc,WACtD,iBAAjBvc,EAAOyc,QAChBzc,EAAOyc,MAAQ,CACb1Q,KAAM/L,EAAOyc,MACb3Q,KAAM9L,EAAOyc,QAGW,iBAAjBzc,EAAO0c,QAChB1c,EAAO0c,MAAQ1c,EAAO0c,MAAM3gC,YAEA,iBAAnBikB,EAAOsZ,UAChBtZ,EAAOsZ,QAAUtZ,EAAOsZ,QAAQv9B,YAE3BikB,CACT,CACA,kBAAA8e,GACE,MAAM9e,EAAS,CAAC,EAChB,IAAK,MAAOhnB,EAAKa,KAAUX,OAAOmkB,QAAQnB,KAAK6E,SACzC7E,KAAKmE,YAAYT,QAAQ5mB,KAASa,IACpCmmB,EAAOhnB,GAAOa,GASlB,OANAmmB,EAAO/J,UAAW,EAClB+J,EAAOlC,QAAU,SAKVkC,CACT,CACA,cAAA6d,GACM3hB,KAAKmS,UACPnS,KAAKmS,QAAQnZ,UACbgH,KAAKmS,QAAU,MAEbnS,KAAKihB,MACPjhB,KAAKihB,IAAItnB,SACTqG,KAAKihB,IAAM,KAEf,CAGA,sBAAOxkB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOq2B,GAAQpb,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmBukB,IAcnB,MACM2C,GAAiB,kBACjBC,GAAmB,gBACnBC,GAAY,IACb7C,GAAQhd,QACX0Z,QAAS,GACTp1B,OAAQ,CAAC,EAAG,GACZtJ,UAAW,QACX8+B,SAAU,8IACV5b,QAAS,SAEL4hB,GAAgB,IACjB9C,GAAQ/c,YACXyZ,QAAS,kCAOX,MAAMqG,WAAgB/C,GAEpB,kBAAWhd,GACT,OAAO6f,EACT,CACA,sBAAW5f,GACT,OAAO6f,EACT,CACA,eAAWjnB,GACT,MA7BW,SA8Bb,CAGA,cAAAqlB,GACE,OAAO5hB,KAAKgiB,aAAehiB,KAAK0jB,aAClC,CAGA,sBAAAxB,GACE,MAAO,CACL,CAACmB,IAAiBrjB,KAAKgiB,YACvB,CAACsB,IAAmBtjB,KAAK0jB,cAE7B,CACA,WAAAA,GACE,OAAO1jB,KAAK8d,yBAAyB9d,KAAK6E,QAAQuY,QACpD,CAGA,sBAAO3gB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOo5B,GAAQne,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmBsnB,IAcnB,MAEME,GAAc,gBAEdC,GAAiB,WAAWD,KAC5BE,GAAc,QAAQF,KACtBG,GAAwB,OAAOH,cAE/BI,GAAsB,SAEtBC,GAAwB,SAExBC,GAAqB,YAGrBC,GAAsB,GAAGD,mBAA+CA,uBAGxEE,GAAY,CAChBn8B,OAAQ,KAERo8B,WAAY,eACZC,cAAc,EACd93B,OAAQ,KACR+3B,UAAW,CAAC,GAAK,GAAK,IAElBC,GAAgB,CACpBv8B,OAAQ,gBAERo8B,WAAY,SACZC,aAAc,UACd93B,OAAQ,UACR+3B,UAAW,SAOb,MAAME,WAAkB9f,GACtB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GAGf9D,KAAKykB,aAAe,IAAIvzB,IACxB8O,KAAK0kB,
oBAAsB,IAAIxzB,IAC/B8O,KAAK2kB,aAA6D,YAA9C1/B,iBAAiB+a,KAAK4E,UAAU5Y,UAA0B,KAAOgU,KAAK4E,SAC1F5E,KAAK4kB,cAAgB,KACrB5kB,KAAK6kB,UAAY,KACjB7kB,KAAK8kB,oBAAsB,CACzBC,gBAAiB,EACjBC,gBAAiB,GAEnBhlB,KAAKilB,SACP,CAGA,kBAAWvhB,GACT,OAAOygB,EACT,CACA,sBAAWxgB,GACT,OAAO4gB,EACT,CACA,eAAWhoB,GACT,MAhEW,WAiEb,CAGA,OAAA0oB,GACEjlB,KAAKklB,mCACLllB,KAAKmlB,2BACDnlB,KAAK6kB,UACP7kB,KAAK6kB,UAAUO,aAEfplB,KAAK6kB,UAAY7kB,KAAKqlB,kBAExB,IAAK,MAAMC,KAAWtlB,KAAK0kB,oBAAoBvlB,SAC7Ca,KAAK6kB,UAAUU,QAAQD,EAE3B,CACA,OAAAvgB,GACE/E,KAAK6kB,UAAUO,aACfzgB,MAAMI,SACR,CAGA,iBAAAf,CAAkBF,GAShB,OAPAA,EAAOvX,OAASmO,GAAWoJ,EAAOvX,SAAWlH,SAAS6G,KAGtD4X,EAAOsgB,WAAatgB,EAAO9b,OAAS,GAAG8b,EAAO9b,oBAAsB8b,EAAOsgB,WAC3C,iBAArBtgB,EAAOwgB,YAChBxgB,EAAOwgB,UAAYxgB,EAAOwgB,UAAUpiC,MAAM,KAAKY,KAAInF,GAAS4f,OAAOC,WAAW7f,MAEzEmmB,CACT,CACA,wBAAAqhB,GACOnlB,KAAK6E,QAAQwf,eAKlB9jB,GAAaC,IAAIR,KAAK6E,QAAQtY,OAAQs3B,IACtCtjB,GAAac,GAAGrB,KAAK6E,QAAQtY,OAAQs3B,GAAaG,IAAuB5kB,IACvE,MAAMomB,EAAoBxlB,KAAK0kB,oBAAoBvnC,IAAIiiB,EAAM7S,OAAOtB,MACpE,GAAIu6B,EAAmB,CACrBpmB,EAAMkD,iBACN,MAAM3G,EAAOqE,KAAK2kB,cAAgB/kC,OAC5BmE,EAASyhC,EAAkBnhC,UAAY2b,KAAK4E,SAASvgB,UAC3D,GAAIsX,EAAK8pB,SAKP,YAJA9pB,EAAK8pB,SAAS,CACZ9jC,IAAKoC,EACL2hC,SAAU,WAMd/pB,EAAKlQ,UAAY1H,CACnB,KAEJ,CACA,eAAAshC,GACE,MAAM5jC,EAAU,CACdka,KAAMqE,KAAK2kB,aACXL,UAAWtkB,KAAK6E,QAAQyf,UACxBF,WAAYpkB,KAAK6E,QAAQuf,YAE3B,OAAO,IAAIuB,sBAAqBxkB,GAAWnB,KAAK4lB,kBAAkBzkB,IAAU1f,EAC9E,CAGA,iBAAAmkC,CAAkBzkB,GAChB,MAAM0kB,EAAgBlI,GAAS3d,KAAKykB,aAAatnC,IAAI,IAAIwgC,EAAMpxB,OAAO4N,MAChEub,EAAWiI,IACf3d,KAAK8kB,oBAAoBC,gBAAkBpH,EAAMpxB,OAAOlI,UACxD2b,KAAK8lB,SAASD,EAAclI,GAAO,EAE/BqH,GAAmBhlB,KAAK2kB,cAAgBt/B,SAASC,iBAAiBmG,UAClEs6B,EAAkBf,GAAmBhlB,KAAK8kB,oBAAoBE,gBACpEhlB,KAAK8kB,oBAAoBE,gBAAkBA,EAC3C,IAAK,MAAMrH,KAASxc,EAAS,CAC3B,IAAKwc,EAAMqI,eAAgB,CACzBhmB,KAAK4kB,cAAgB,KACrB5kB,KAAKimB,kBAAkBJ,EAAclI,IACrC,QACF,CACA,MAAMuI,EAA2BvI,EAAMpxB,OAAOlI,WAAa2b,KAAK8kB,oBAAoBC,gBAEpF,GAAIgB,GAAmBG,GAGrB,GAFAxQ,EAASiI,IAEJqH,EACH,YAMCe,GAAoBG,GACvBxQ,EAASiI,EAEb,CACF,CACA,gCAAAuH,GACEllB,KAAKykB,aAAe,IAAIvzB,IACxB8O,KAAK0kB,oBAAsB,IAAIxzB,IAC/B,MAAMi1B,EAActgB,GAAe1T,KAAK6xB,GAAuBhkB,KAAK6E,QAAQtY,QAC5E,IAAK,MAAM65B,KAAUD,EAAa,CAEhC,IAAKC,EAAOn7B,MAAQiQ,GAAWkrB,GAC7B,SAEF,MAAMZ,EAAoB3f,GAAeC,QAAQugB,UAAUD,EAAOn7B,MAAO+U,KAAK4E,UAG1EjK,GAAU6qB,KACZxlB,KAAKykB,aAAa1yB,IAAIs0B,UAAUD,EAAOn7B,MAAOm7B,GAC9CpmB,KAAK0kB,oBAAoB3yB,IAAIq0B,EAAOn7B,KAAMu6B,GAE9C,CACF,CACA,QAAAM,CAASv5B,GACHyT,KAAK4kB,gBAAkBr4B,IAG3ByT,KAAKimB,kBAAkBjmB,KAAK6E,QAAQtY,QACpCyT,KAAK4kB,cAAgBr4B,EACrBA,EAAO8O,UAAU5E,IAAIstB,IACrB/jB,KAAKsmB,iBAAiB/5B,GACtBgU,GAAaqB,QAAQ5B,KAAK4E,SAAUgf,GAAgB,CAClD9jB,cAAevT,IAEnB,CACA,gBAAA+5B,CAAiB/5B,GAEf,GAAIA,EAAO8O,UAAU7W,SA9LQ,iBA+L3BqhB,GAAeC,QArLc,mBAqLsBvZ,EAAOyO,QAtLtC,cAsLkEK,UAAU5E,IAAIstB,SAGtG,IAAK,MAAMwC,KAAa1gB,GAAeI,QAAQ1Z,EA9LnB,qBAiM1B,IAAK,MAAMxJ,KAAQ8iB,GAAeM,KAAKogB,EAAWrC,IAChDnhC,EAAKsY,UAAU5E,IAAIstB,GAGzB,CACA,iBAAAkC,CAAkBxhC,GAChBA,EAAO4W,UAAU1B,OAAOoqB,IACxB,MAAMyC,EAAc3gB,GAAe1T,KAAK,GAAG6xB,MAAyBD,KAAuBt/B,GAC3F,IAAK,MAAM9E,KAAQ6mC,EACjB7mC,EAAK0b,UAAU1B,OAAOoqB,GAE1B,CAGA,sBAAOtnB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAOm6B,GAAUlf,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGzhB,OAAQkkC,IAAuB,KAC7C,IAAK,MAAM2C,KAAO5gB,GAAe1T,KApOT,0BAqOtBqyB,GAAUlf,oBAAoBmhB,EAChC,IAOFtqB,GAAmBqoB,IAcnB,MAEMkC,GAAc,UACdC,GAAe,OAAOD,KACtBE,GAAiB,SAASF,KAC1BG,GAAe,OAAOH,KACtBI,GAAgB,QAAQJ,KACxBK,GAAuB,QAAQL,KAC/BM,GAAgB,UAAUN,KAC1BO,GAAsB,OAA
OP,KAC7BQ,GAAiB,YACjBC,GAAkB,aAClBC,GAAe,UACfC,GAAiB,YACjBC,GAAW,OACXC,GAAU,MACVC,GAAoB,SACpBC,GAAoB,OACpBC,GAAoB,OAEpBC,GAA2B,mBAE3BC,GAA+B,QAAQD,MAIvCE,GAAuB,2EACvBC,GAAsB,YAFOF,uBAAiDA,mBAA6CA,OAE/EC,KAC5CE,GAA8B,IAAIP,8BAA6CA,+BAA8CA,4BAMnI,MAAMQ,WAAYtjB,GAChB,WAAAP,CAAY5kB,GACVolB,MAAMplB,GACNygB,KAAKoS,QAAUpS,KAAK4E,SAAS5J,QAdN,uCAelBgF,KAAKoS,UAOVpS,KAAKioB,sBAAsBjoB,KAAKoS,QAASpS,KAAKkoB,gBAC9C3nB,GAAac,GAAGrB,KAAK4E,SAAUoiB,IAAe5nB,GAASY,KAAK6M,SAASzN,KACvE,CAGA,eAAW7C,GACT,MAnDW,KAoDb,CAGA,IAAAsT,GAEE,MAAMsY,EAAYnoB,KAAK4E,SACvB,GAAI5E,KAAKooB,cAAcD,GACrB,OAIF,MAAME,EAASroB,KAAKsoB,iBACdC,EAAYF,EAAS9nB,GAAaqB,QAAQymB,EAAQ1B,GAAc,CACpE7mB,cAAeqoB,IACZ,KACa5nB,GAAaqB,QAAQumB,EAAWtB,GAAc,CAC9D/mB,cAAeuoB,IAEHrmB,kBAAoBumB,GAAaA,EAAUvmB,mBAGzDhC,KAAKwoB,YAAYH,EAAQF,GACzBnoB,KAAKyoB,UAAUN,EAAWE,GAC5B,CAGA,SAAAI,CAAUlpC,EAASmpC,GACZnpC,IAGLA,EAAQ8b,UAAU5E,IAAI+wB,IACtBxnB,KAAKyoB,UAAU5iB,GAAec,uBAAuBpnB,IAcrDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ4B,gBAAgB,YACxB5B,EAAQ6B,aAAa,iBAAiB,GACtC4e,KAAK2oB,gBAAgBppC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAASunC,GAAe,CAC3ChnB,cAAe4oB,KAPfnpC,EAAQ8b,UAAU5E,IAAIixB,GAQtB,GAE0BnoC,EAASA,EAAQ8b,UAAU7W,SAASijC,KACpE,CACA,WAAAe,CAAYjpC,EAASmpC,GACdnpC,IAGLA,EAAQ8b,UAAU1B,OAAO6tB,IACzBjoC,EAAQq7B,OACR5a,KAAKwoB,YAAY3iB,GAAec,uBAAuBpnB,IAcvDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ6B,aAAa,iBAAiB,GACtC7B,EAAQ6B,aAAa,WAAY,MACjC4e,KAAK2oB,gBAAgBppC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAASqnC,GAAgB,CAC5C9mB,cAAe4oB,KAPfnpC,EAAQ8b,UAAU1B,OAAO+tB,GAQzB,GAE0BnoC,EAASA,EAAQ8b,UAAU7W,SAASijC,KACpE,CACA,QAAA5a,CAASzN,GACP,IAAK,CAAC8nB,GAAgBC,GAAiBC,GAAcC,GAAgBC,GAAUC,IAASnmB,SAAShC,EAAMtiB,KACrG,OAEFsiB,EAAM0U,kBACN1U,EAAMkD,iBACN,MAAMyD,EAAW/F,KAAKkoB,eAAe/hC,QAAO5G,IAAY2b,GAAW3b,KACnE,IAAIqpC,EACJ,GAAI,CAACtB,GAAUC,IAASnmB,SAAShC,EAAMtiB,KACrC8rC,EAAoB7iB,EAAS3G,EAAMtiB,MAAQwqC,GAAW,EAAIvhB,EAASrV,OAAS,OACvE,CACL,MAAM8c,EAAS,CAAC2Z,GAAiBE,IAAgBjmB,SAAShC,EAAMtiB,KAChE8rC,EAAoB9qB,GAAqBiI,EAAU3G,EAAM7S,OAAQihB,GAAQ,EAC3E,CACIob,IACFA,EAAkBnW,MAAM,CACtBoW,eAAe,IAEjBb,GAAI1iB,oBAAoBsjB,GAAmB/Y,OAE/C,CACA,YAAAqY,GAEE,OAAOriB,GAAe1T,KAAK21B,GAAqB9nB,KAAKoS,QACvD,CACA,cAAAkW,GACE,OAAOtoB,KAAKkoB,eAAe/1B,MAAKzN,GAASsb,KAAKooB,cAAc1jC,MAAW,IACzE,CACA,qBAAAujC,CAAsBxjC,EAAQshB,GAC5B/F,KAAK8oB,yBAAyBrkC,EAAQ,OAAQ,WAC9C,IAAK,MAAMC,KAASqhB,EAClB/F,KAAK+oB,6BAA6BrkC,EAEtC,CACA,4BAAAqkC,CAA6BrkC,GAC3BA,EAAQsb,KAAKgpB,iBAAiBtkC,GAC9B,MAAMukC,EAAWjpB,KAAKooB,cAAc1jC,GAC9BwkC,EAAYlpB,KAAKmpB,iBAAiBzkC,GACxCA,EAAMtD,aAAa,gBAAiB6nC,GAChCC,IAAcxkC,GAChBsb,KAAK8oB,yBAAyBI,EAAW,OAAQ,gBAE9CD,GACHvkC,EAAMtD,aAAa,WAAY,MAEjC4e,KAAK8oB,yBAAyBpkC,EAAO,OAAQ,OAG7Csb,KAAKopB,mCAAmC1kC,EAC1C,CACA,kCAAA0kC,CAAmC1kC,GACjC,MAAM6H,EAASsZ,GAAec,uBAAuBjiB,GAChD6H,IAGLyT,KAAK8oB,yBAAyBv8B,EAAQ,OAAQ,YAC1C7H,EAAMyV,IACR6F,KAAK8oB,yBAAyBv8B,EAAQ,kBAAmB,GAAG7H,EAAMyV,MAEtE,CACA,eAAAwuB,CAAgBppC,EAAS8pC,GACvB,MAAMH,EAAYlpB,KAAKmpB,iBAAiB5pC,GACxC,IAAK2pC,EAAU7tB,UAAU7W,SApKN,YAqKjB,OAEF,MAAMmjB,EAAS,CAAC5N,EAAUoa,KACxB,MAAM50B,EAAUsmB,GAAeC,QAAQ/L,EAAUmvB,GAC7C3pC,GACFA,EAAQ8b,UAAUsM,OAAOwM,EAAWkV,EACtC,EAEF1hB,EAAOggB,GAA0BH,IACjC7f,EA5K2B,iBA4KI+f,IAC/BwB,EAAU9nC,aAAa,gBAAiBioC,EAC1C,CACA,wBAAAP,CAAyBvpC,EAASwC,EAAWpE,GACtC4B,EAAQgc,aAAaxZ,IACxBxC,EAAQ6B,aAAaW,EAAWpE,EAEpC,CACA,aAAAyqC,CAAc9Y,GACZ,OAAOA,EAAKjU,UAAU7W,SAASgjC,GACjC,CAGA,gBAAAwB,CAAiB1Z,GACf,OAAOA,EAAKtJ,QAAQ8hB,IAAuBxY,EAAOzJ,GAAeC,QAAQgiB,GAAqBxY,EAChG,CAGA,gBAAA6Z,CAAiB7Z,GACf,OAAOA,EAAKtU,QA5LO,gCA4LoBsU,CACzC,CAGA,sBAAO7S,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAO29B,GAAI1iB,oBAAoBtF,MACrC,GAAsB,iBAAX8D,EAAX,C
AGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGhc,SAAU0hC,GAAsBc,IAAsB,SAAUzoB,GAC1E,CAAC,IAAK,QAAQgC,SAASpB,KAAKiH,UAC9B7H,EAAMkD,iBAEJpH,GAAW8E,OAGfgoB,GAAI1iB,oBAAoBtF,MAAM6P,MAChC,IAKAtP,GAAac,GAAGzhB,OAAQqnC,IAAqB,KAC3C,IAAK,MAAM1nC,KAAWsmB,GAAe1T,KAAK41B,IACxCC,GAAI1iB,oBAAoB/lB,EAC1B,IAMF4c,GAAmB6rB,IAcnB,MAEMhjB,GAAY,YACZskB,GAAkB,YAAYtkB,KAC9BukB,GAAiB,WAAWvkB,KAC5BwkB,GAAgB,UAAUxkB,KAC1BykB,GAAiB,WAAWzkB,KAC5B0kB,GAAa,OAAO1kB,KACpB2kB,GAAe,SAAS3kB,KACxB4kB,GAAa,OAAO5kB,KACpB6kB,GAAc,QAAQ7kB,KAEtB8kB,GAAkB,OAClBC,GAAkB,OAClBC,GAAqB,UACrBrmB,GAAc,CAClByc,UAAW,UACX6J,SAAU,UACV1J,MAAO,UAEH7c,GAAU,CACd0c,WAAW,EACX6J,UAAU,EACV1J,MAAO,KAOT,MAAM2J,WAAcxlB,GAClB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAK4gB,SAAW,KAChB5gB,KAAKmqB,sBAAuB,EAC5BnqB,KAAKoqB,yBAA0B,EAC/BpqB,KAAKkhB,eACP,CAGA,kBAAWxd,GACT,OAAOA,EACT,CACA,sBAAWC,GACT,OAAOA,EACT,CACA,eAAWpH,GACT,MA/CS,OAgDX,CAGA,IAAAsT,GACoBtP,GAAaqB,QAAQ5B,KAAK4E,SAAUglB,IACxC5nB,mBAGdhC,KAAKqqB,gBACDrqB,KAAK6E,QAAQub,WACfpgB,KAAK4E,SAASvJ,UAAU5E,IA/CN,QAsDpBuJ,KAAK4E,SAASvJ,UAAU1B,OAAOmwB,IAC/BjuB,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIszB,GAAiBC,IAC7ChqB,KAAKmF,gBARY,KACfnF,KAAK4E,SAASvJ,UAAU1B,OAAOqwB,IAC/BzpB,GAAaqB,QAAQ5B,KAAK4E,SAAUilB,IACpC7pB,KAAKsqB,oBAAoB,GAKGtqB,KAAK4E,SAAU5E,KAAK6E,QAAQub,WAC5D,CACA,IAAAxQ,GACO5P,KAAKuqB,YAGQhqB,GAAaqB,QAAQ5B,KAAK4E,SAAU8kB,IACxC1nB,mBAQdhC,KAAK4E,SAASvJ,UAAU5E,IAAIuzB,IAC5BhqB,KAAKmF,gBANY,KACfnF,KAAK4E,SAASvJ,UAAU5E,IAAIqzB,IAC5B9pB,KAAK4E,SAASvJ,UAAU1B,OAAOqwB,GAAoBD,IACnDxpB,GAAaqB,QAAQ5B,KAAK4E,SAAU+kB,GAAa,GAGrB3pB,KAAK4E,SAAU5E,KAAK6E,QAAQub,YAC5D,CACA,OAAArb,GACE/E,KAAKqqB,gBACDrqB,KAAKuqB,WACPvqB,KAAK4E,SAASvJ,UAAU1B,OAAOowB,IAEjCplB,MAAMI,SACR,CACA,OAAAwlB,GACE,OAAOvqB,KAAK4E,SAASvJ,UAAU7W,SAASulC,GAC1C,CAIA,kBAAAO,GACOtqB,KAAK6E,QAAQolB,WAGdjqB,KAAKmqB,sBAAwBnqB,KAAKoqB,0BAGtCpqB,KAAK4gB,SAAW/iB,YAAW,KACzBmC,KAAK4P,MAAM,GACV5P,KAAK6E,QAAQ0b,QAClB,CACA,cAAAiK,CAAeprB,EAAOqrB,GACpB,OAAQrrB,EAAMqB,MACZ,IAAK,YACL,IAAK,WAEDT,KAAKmqB,qBAAuBM,EAC5B,MAEJ,IAAK,UACL,IAAK,WAEDzqB,KAAKoqB,wBAA0BK,EAIrC,GAAIA,EAEF,YADAzqB,KAAKqqB,gBAGP,MAAM5c,EAAcrO,EAAMU,cACtBE,KAAK4E,WAAa6I,GAAezN,KAAK4E,SAASpgB,SAASipB,IAG5DzN,KAAKsqB,oBACP,CACA,aAAApJ,GACE3gB,GAAac,GAAGrB,KAAK4E,SAAU0kB,IAAiBlqB,GAASY,KAAKwqB,eAAeprB,GAAO,KACpFmB,GAAac,GAAGrB,KAAK4E,SAAU2kB,IAAgBnqB,GAASY,KAAKwqB,eAAeprB,GAAO,KACnFmB,GAAac,GAAGrB,KAAK4E,SAAU4kB,IAAepqB,GAASY,KAAKwqB,eAAeprB,GAAO,KAClFmB,GAAac,GAAGrB,KAAK4E,SAAU6kB,IAAgBrqB,GAASY,KAAKwqB,eAAeprB,GAAO,IACrF,CACA,aAAAirB,GACEnd,aAAalN,KAAK4gB,UAClB5gB,KAAK4gB,SAAW,IAClB,CAGA,sBAAOnkB,CAAgBqH,GACrB,OAAO9D,KAAKwH,MAAK,WACf,MAAMnd,EAAO6/B,GAAM5kB,oBAAoBtF,KAAM8D,GAC7C,GAAsB,iBAAXA,EAAqB,CAC9B,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KACf,CACF,GACF,ECr0IK,SAAS0qB,GAAcruB,GACD,WAAvBhX,SAASuX,WAAyBP,IACjChX,SAASyF,iBAAiB,mBAAoBuR,EACrD,CDy0IAwK,GAAqBqjB,IAMrB/tB,GAAmB+tB,IEpyInBQ,IAzCA,WAC2B,GAAGt4B,MAAM5U,KAChC6H,SAAS+a,iBAAiB,+BAETtd,KAAI,SAAU6nC,GAC/B,OAAO,IAAI,GAAkBA,EAAkB,CAC7CpK,MAAO,CAAE1Q,KAAM,IAAKD,KAAM,MAE9B,GACF,IAiCA8a,IA5BA,WACYrlC,SAASm9B,eAAe,mBAC9B13B,iBAAiB,SAAS,WAC5BzF,SAAS6G,KAAKT,UAAY,EAC1BpG,SAASC,gBAAgBmG,UAAY,CACvC,GACF,IAuBAi/B,IArBA,WACE,IAAIE,EAAMvlC,SAASm9B,eAAe,mBAC9BqI,EAASxlC,SACVylC,uBAAuB,aAAa,GACpCxnC,wBACH1D,OAAOkL,iBAAiB,UAAU,WAC5BkV,KAAK+qB,UAAY/qB,KAAKgrB,SAAWhrB,KAAKgrB,QAAUH,EAAOjtC,OACzDgtC,EAAI7pC,MAAMgxB,QAAU,QAEpB6Y,EAAI7pC,MAAMgxB,QAAU,OAEtB/R,KAAK+qB,UAAY/qB,KAAKgrB,OACxB,GACF,IAUAprC,OAAOqrC,UAAY","sources":
["webpack://pydata_sphinx_theme/webpack/bootstrap","webpack://pydata_sphinx_theme/webpack/runtime/define property getters","webpack://pydata_sphinx_theme/webpack/runtime/hasOwnProperty shorthand","webpack://pydata_sphinx_theme/webpack/runtime/make namespace object","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/enums.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/instanceOf.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/applyStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getBasePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/math.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/userAgent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isLayoutViewport.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getBoundingClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getLayoutRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/contains.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getComputedStyle.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isTableElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getParentNode.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getOffsetParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getMainAxisFromPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/within.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergePaddingObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getFreshSideObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/expandToHashMap.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/arrow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getVariation.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/computeStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/eventListeners.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositeVariationPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScrollBarX.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/listScrollParents.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/rectToClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getClippingRect.js","webpack://pydata_sphinx_theme/./node_modules/@popper
js/core/lib/dom-utils/getViewportRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/detectOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/flip.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeAutoPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/hide.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/offset.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/popperOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/preventOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getAltAxis.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getCompositeRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getHTMLElementScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/orderModifiers.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/createPopper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/debounce.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergeByName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper-lite.js","webpack://pydata_sphinx_theme/./node_modules/bootstrap/dist/js/bootstrap.esm.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/mixin.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/bootstrap.js"],"sourcesContent":["// The require scope\nvar __webpack_require__ = {};\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","export var top = 'top';\nexport var bottom = 'bottom';\nexport var right = 'right';\nexport var left = 'left';\nexport var auto = 'auto';\nexport var basePlacements = [top, bottom, right, left];\nexport var start = 'start';\nexport var end = 'end';\nexport var clippingParents = 'clippingParents';\nexport var viewport = 'viewport';\nexport var popper = 'popper';\nexport var reference = 'reference';\nexport var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {\n return acc.concat([placement + \"-\" + start, placement + \"-\" + end]);\n}, []);\nexport var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {\n return acc.concat([placement, placement + \"-\" + start, placement + \"-\" + end]);\n}, []); // 
modifiers that need to read the DOM\n\nexport var beforeRead = 'beforeRead';\nexport var read = 'read';\nexport var afterRead = 'afterRead'; // pure-logic modifiers\n\nexport var beforeMain = 'beforeMain';\nexport var main = 'main';\nexport var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)\n\nexport var beforeWrite = 'beforeWrite';\nexport var write = 'write';\nexport var afterWrite = 'afterWrite';\nexport var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];","export default function getNodeName(element) {\n return element ? (element.nodeName || '').toLowerCase() : null;\n}","export default function getWindow(node) {\n if (node == null) {\n return window;\n }\n\n if (node.toString() !== '[object Window]') {\n var ownerDocument = node.ownerDocument;\n return ownerDocument ? ownerDocument.defaultView || window : window;\n }\n\n return node;\n}","import getWindow from \"./getWindow.js\";\n\nfunction isElement(node) {\n var OwnElement = getWindow(node).Element;\n return node instanceof OwnElement || node instanceof Element;\n}\n\nfunction isHTMLElement(node) {\n var OwnElement = getWindow(node).HTMLElement;\n return node instanceof OwnElement || node instanceof HTMLElement;\n}\n\nfunction isShadowRoot(node) {\n // IE 11 has no ShadowRoot\n if (typeof ShadowRoot === 'undefined') {\n return false;\n }\n\n var OwnElement = getWindow(node).ShadowRoot;\n return node instanceof OwnElement || node instanceof ShadowRoot;\n}\n\nexport { isElement, isHTMLElement, isShadowRoot };","import getNodeName from \"../dom-utils/getNodeName.js\";\nimport { isHTMLElement } from \"../dom-utils/instanceOf.js\"; // This modifier takes the styles prepared by the `computeStyles` modifier\n// and applies them to the HTMLElements such as popper and arrow\n\nfunction applyStyles(_ref) {\n var state = _ref.state;\n Object.keys(state.elements).forEach(function (name) {\n var style = state.styles[name] || {};\n var attributes = state.attributes[name] || {};\n var element = state.elements[name]; // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n } // Flow doesn't support to extend this property, but it's the most\n // effective way to apply styles to an HTMLElement\n // $FlowFixMe[cannot-write]\n\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (name) {\n var value = attributes[name];\n\n if (value === false) {\n element.removeAttribute(name);\n } else {\n element.setAttribute(name, value === true ? '' : value);\n }\n });\n });\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state;\n var initialStyles = {\n popper: {\n position: state.options.strategy,\n left: '0',\n top: '0',\n margin: '0'\n },\n arrow: {\n position: 'absolute'\n },\n reference: {}\n };\n Object.assign(state.elements.popper.style, initialStyles.popper);\n state.styles = initialStyles;\n\n if (state.elements.arrow) {\n Object.assign(state.elements.arrow.style, initialStyles.arrow);\n }\n\n return function () {\n Object.keys(state.elements).forEach(function (name) {\n var element = state.elements[name];\n var attributes = state.attributes[name] || {};\n var styleProperties = Object.keys(state.styles.hasOwnProperty(name) ? 
state.styles[name] : initialStyles[name]); // Set all values to an empty string to unset them\n\n var style = styleProperties.reduce(function (style, property) {\n style[property] = '';\n return style;\n }, {}); // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n }\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (attribute) {\n element.removeAttribute(attribute);\n });\n });\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'applyStyles',\n enabled: true,\n phase: 'write',\n fn: applyStyles,\n effect: effect,\n requires: ['computeStyles']\n};","import { auto } from \"../enums.js\";\nexport default function getBasePlacement(placement) {\n return placement.split('-')[0];\n}","export var max = Math.max;\nexport var min = Math.min;\nexport var round = Math.round;","export default function getUAString() {\n var uaData = navigator.userAgentData;\n\n if (uaData != null && uaData.brands && Array.isArray(uaData.brands)) {\n return uaData.brands.map(function (item) {\n return item.brand + \"/\" + item.version;\n }).join(' ');\n }\n\n return navigator.userAgent;\n}","import getUAString from \"../utils/userAgent.js\";\nexport default function isLayoutViewport() {\n return !/^((?!chrome|android).)*safari/i.test(getUAString());\n}","import { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport { round } from \"../utils/math.js\";\nimport getWindow from \"./getWindow.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getBoundingClientRect(element, includeScale, isFixedStrategy) {\n if (includeScale === void 0) {\n includeScale = false;\n }\n\n if (isFixedStrategy === void 0) {\n isFixedStrategy = false;\n }\n\n var clientRect = element.getBoundingClientRect();\n var scaleX = 1;\n var scaleY = 1;\n\n if (includeScale && isHTMLElement(element)) {\n scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;\n scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;\n }\n\n var _ref = isElement(element) ? getWindow(element) : window,\n visualViewport = _ref.visualViewport;\n\n var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;\n var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;\n var y = (clientRect.top + (addVisualOffsets && visualViewport ? visualViewport.offsetTop : 0)) / scaleY;\n var width = clientRect.width / scaleX;\n var height = clientRect.height / scaleY;\n return {\n width: width,\n height: height,\n top: y,\n right: x + width,\n bottom: y + height,\n left: x,\n x: x,\n y: y\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\"; // Returns the layout rect of an element relative to its offsetParent. 
Layout\n// means it doesn't take into account transforms.\n\nexport default function getLayoutRect(element) {\n var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.\n // Fixes https://github.com/popperjs/popper-core/issues/1223\n\n var width = element.offsetWidth;\n var height = element.offsetHeight;\n\n if (Math.abs(clientRect.width - width) <= 1) {\n width = clientRect.width;\n }\n\n if (Math.abs(clientRect.height - height) <= 1) {\n height = clientRect.height;\n }\n\n return {\n x: element.offsetLeft,\n y: element.offsetTop,\n width: width,\n height: height\n };\n}","import { isShadowRoot } from \"./instanceOf.js\";\nexport default function contains(parent, child) {\n var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method\n\n if (parent.contains(child)) {\n return true;\n } // then fallback to custom implementation with Shadow DOM support\n else if (rootNode && isShadowRoot(rootNode)) {\n var next = child;\n\n do {\n if (next && parent.isSameNode(next)) {\n return true;\n } // $FlowFixMe[prop-missing]: need a better way to handle this...\n\n\n next = next.parentNode || next.host;\n } while (next);\n } // Give up, the result is false\n\n\n return false;\n}","import getWindow from \"./getWindow.js\";\nexport default function getComputedStyle(element) {\n return getWindow(element).getComputedStyle(element);\n}","import getNodeName from \"./getNodeName.js\";\nexport default function isTableElement(element) {\n return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;\n}","import { isElement } from \"./instanceOf.js\";\nexport default function getDocumentElement(element) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return ((isElement(element) ? element.ownerDocument : // $FlowFixMe[prop-missing]\n element.document) || window.document).documentElement;\n}","import getNodeName from \"./getNodeName.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport { isShadowRoot } from \"./instanceOf.js\";\nexport default function getParentNode(element) {\n if (getNodeName(element) === 'html') {\n return element;\n }\n\n return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle\n // $FlowFixMe[incompatible-return]\n // $FlowFixMe[prop-missing]\n element.assignedSlot || // step into the shadow DOM of the parent of a slotted node\n element.parentNode || ( // DOM Element detected\n isShadowRoot(element) ? 
element.host : null) || // ShadowRoot detected\n // $FlowFixMe[incompatible-call]: HTMLElement is a Node\n getDocumentElement(element) // fallback\n\n );\n}","import getWindow from \"./getWindow.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isHTMLElement, isShadowRoot } from \"./instanceOf.js\";\nimport isTableElement from \"./isTableElement.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getUAString from \"../utils/userAgent.js\";\n\nfunction getTrueOffsetParent(element) {\n if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837\n getComputedStyle(element).position === 'fixed') {\n return null;\n }\n\n return element.offsetParent;\n} // `.offsetParent` reports `null` for fixed elements, while absolute elements\n// return the containing block\n\n\nfunction getContainingBlock(element) {\n var isFirefox = /firefox/i.test(getUAString());\n var isIE = /Trident/i.test(getUAString());\n\n if (isIE && isHTMLElement(element)) {\n // In IE 9, 10 and 11 fixed elements containing block is always established by the viewport\n var elementCss = getComputedStyle(element);\n\n if (elementCss.position === 'fixed') {\n return null;\n }\n }\n\n var currentNode = getParentNode(element);\n\n if (isShadowRoot(currentNode)) {\n currentNode = currentNode.host;\n }\n\n while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {\n var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that\n // create a containing block.\n // https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block\n\n if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {\n return currentNode;\n } else {\n currentNode = currentNode.parentNode;\n }\n }\n\n return null;\n} // Gets the closest ancestor positioned element. Handles some edge cases,\n// such as table ancestors and cross browser bugs.\n\n\nexport default function getOffsetParent(element) {\n var window = getWindow(element);\n var offsetParent = getTrueOffsetParent(element);\n\n while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {\n offsetParent = getTrueOffsetParent(offsetParent);\n }\n\n if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {\n return window;\n }\n\n return offsetParent || getContainingBlock(element) || window;\n}","export default function getMainAxisFromPlacement(placement) {\n return ['top', 'bottom'].indexOf(placement) >= 0 ? 'x' : 'y';\n}","import { max as mathMax, min as mathMin } from \"./math.js\";\nexport function within(min, value, max) {\n return mathMax(min, mathMin(value, max));\n}\nexport function withinMaxClamp(min, value, max) {\n var v = within(min, value, max);\n return v > max ? 
max : v;\n}","import getFreshSideObject from \"./getFreshSideObject.js\";\nexport default function mergePaddingObject(paddingObject) {\n return Object.assign({}, getFreshSideObject(), paddingObject);\n}","export default function getFreshSideObject() {\n return {\n top: 0,\n right: 0,\n bottom: 0,\n left: 0\n };\n}","export default function expandToHashMap(value, keys) {\n return keys.reduce(function (hashMap, key) {\n hashMap[key] = value;\n return hashMap;\n }, {});\n}","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport contains from \"../dom-utils/contains.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport { within } from \"../utils/within.js\";\nimport mergePaddingObject from \"../utils/mergePaddingObject.js\";\nimport expandToHashMap from \"../utils/expandToHashMap.js\";\nimport { left, right, basePlacements, top, bottom } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar toPaddingObject = function toPaddingObject(padding, state) {\n padding = typeof padding === 'function' ? padding(Object.assign({}, state.rects, {\n placement: state.placement\n })) : padding;\n return mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n};\n\nfunction arrow(_ref) {\n var _state$modifiersData$;\n\n var state = _ref.state,\n name = _ref.name,\n options = _ref.options;\n var arrowElement = state.elements.arrow;\n var popperOffsets = state.modifiersData.popperOffsets;\n var basePlacement = getBasePlacement(state.placement);\n var axis = getMainAxisFromPlacement(basePlacement);\n var isVertical = [left, right].indexOf(basePlacement) >= 0;\n var len = isVertical ? 'height' : 'width';\n\n if (!arrowElement || !popperOffsets) {\n return;\n }\n\n var paddingObject = toPaddingObject(options.padding, state);\n var arrowRect = getLayoutRect(arrowElement);\n var minProp = axis === 'y' ? top : left;\n var maxProp = axis === 'y' ? bottom : right;\n var endDiff = state.rects.reference[len] + state.rects.reference[axis] - popperOffsets[axis] - state.rects.popper[len];\n var startDiff = popperOffsets[axis] - state.rects.reference[axis];\n var arrowOffsetParent = getOffsetParent(arrowElement);\n var clientSize = arrowOffsetParent ? axis === 'y' ? arrowOffsetParent.clientHeight || 0 : arrowOffsetParent.clientWidth || 0 : 0;\n var centerToReference = endDiff / 2 - startDiff / 2; // Make sure the arrow doesn't overflow the popper if the center point is\n // outside of the popper bounds\n\n var min = paddingObject[minProp];\n var max = clientSize - arrowRect[len] - paddingObject[maxProp];\n var center = clientSize / 2 - arrowRect[len] / 2 + centerToReference;\n var offset = within(min, center, max); // Prevents breaking syntax highlighting...\n\n var axisProp = axis;\n state.modifiersData[name] = (_state$modifiersData$ = {}, _state$modifiersData$[axisProp] = offset, _state$modifiersData$.centerOffset = offset - center, _state$modifiersData$);\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state,\n options = _ref2.options;\n var _options$element = options.element,\n arrowElement = _options$element === void 0 ? 
'[data-popper-arrow]' : _options$element;\n\n if (arrowElement == null) {\n return;\n } // CSS selector\n\n\n if (typeof arrowElement === 'string') {\n arrowElement = state.elements.popper.querySelector(arrowElement);\n\n if (!arrowElement) {\n return;\n }\n }\n\n if (!contains(state.elements.popper, arrowElement)) {\n return;\n }\n\n state.elements.arrow = arrowElement;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'arrow',\n enabled: true,\n phase: 'main',\n fn: arrow,\n effect: effect,\n requires: ['popperOffsets'],\n requiresIfExists: ['preventOverflow']\n};","export default function getVariation(placement) {\n return placement.split('-')[1];\n}","import { top, left, right, bottom, end } from \"../enums.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getWindow from \"../dom-utils/getWindow.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getComputedStyle from \"../dom-utils/getComputedStyle.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport { round } from \"../utils/math.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar unsetSides = {\n top: 'auto',\n right: 'auto',\n bottom: 'auto',\n left: 'auto'\n}; // Round the offsets to the nearest suitable subpixel based on the DPR.\n// Zooming can change the DPR, but it seems to report a value that will\n// cleanly divide the values into the appropriate subpixels.\n\nfunction roundOffsetsByDPR(_ref, win) {\n var x = _ref.x,\n y = _ref.y;\n var dpr = win.devicePixelRatio || 1;\n return {\n x: round(x * dpr) / dpr || 0,\n y: round(y * dpr) / dpr || 0\n };\n}\n\nexport function mapToStyles(_ref2) {\n var _Object$assign2;\n\n var popper = _ref2.popper,\n popperRect = _ref2.popperRect,\n placement = _ref2.placement,\n variation = _ref2.variation,\n offsets = _ref2.offsets,\n position = _ref2.position,\n gpuAcceleration = _ref2.gpuAcceleration,\n adaptive = _ref2.adaptive,\n roundOffsets = _ref2.roundOffsets,\n isFixed = _ref2.isFixed;\n var _offsets$x = offsets.x,\n x = _offsets$x === void 0 ? 0 : _offsets$x,\n _offsets$y = offsets.y,\n y = _offsets$y === void 0 ? 0 : _offsets$y;\n\n var _ref3 = typeof roundOffsets === 'function' ? roundOffsets({\n x: x,\n y: y\n }) : {\n x: x,\n y: y\n };\n\n x = _ref3.x;\n y = _ref3.y;\n var hasX = offsets.hasOwnProperty('x');\n var hasY = offsets.hasOwnProperty('y');\n var sideX = left;\n var sideY = top;\n var win = window;\n\n if (adaptive) {\n var offsetParent = getOffsetParent(popper);\n var heightProp = 'clientHeight';\n var widthProp = 'clientWidth';\n\n if (offsetParent === getWindow(popper)) {\n offsetParent = getDocumentElement(popper);\n\n if (getComputedStyle(offsetParent).position !== 'static' && position === 'absolute') {\n heightProp = 'scrollHeight';\n widthProp = 'scrollWidth';\n }\n } // $FlowFixMe[incompatible-cast]: force type refinement, we compare offsetParent with window above, but Flow doesn't detect it\n\n\n offsetParent = offsetParent;\n\n if (placement === top || (placement === left || placement === right) && variation === end) {\n sideY = bottom;\n var offsetY = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.height : // $FlowFixMe[prop-missing]\n offsetParent[heightProp];\n y -= offsetY - popperRect.height;\n y *= gpuAcceleration ? 
1 : -1;\n }\n\n if (placement === left || (placement === top || placement === bottom) && variation === end) {\n sideX = right;\n var offsetX = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.width : // $FlowFixMe[prop-missing]\n offsetParent[widthProp];\n x -= offsetX - popperRect.width;\n x *= gpuAcceleration ? 1 : -1;\n }\n }\n\n var commonStyles = Object.assign({\n position: position\n }, adaptive && unsetSides);\n\n var _ref4 = roundOffsets === true ? roundOffsetsByDPR({\n x: x,\n y: y\n }, getWindow(popper)) : {\n x: x,\n y: y\n };\n\n x = _ref4.x;\n y = _ref4.y;\n\n if (gpuAcceleration) {\n var _Object$assign;\n\n return Object.assign({}, commonStyles, (_Object$assign = {}, _Object$assign[sideY] = hasY ? '0' : '', _Object$assign[sideX] = hasX ? '0' : '', _Object$assign.transform = (win.devicePixelRatio || 1) <= 1 ? \"translate(\" + x + \"px, \" + y + \"px)\" : \"translate3d(\" + x + \"px, \" + y + \"px, 0)\", _Object$assign));\n }\n\n return Object.assign({}, commonStyles, (_Object$assign2 = {}, _Object$assign2[sideY] = hasY ? y + \"px\" : '', _Object$assign2[sideX] = hasX ? x + \"px\" : '', _Object$assign2.transform = '', _Object$assign2));\n}\n\nfunction computeStyles(_ref5) {\n var state = _ref5.state,\n options = _ref5.options;\n var _options$gpuAccelerat = options.gpuAcceleration,\n gpuAcceleration = _options$gpuAccelerat === void 0 ? true : _options$gpuAccelerat,\n _options$adaptive = options.adaptive,\n adaptive = _options$adaptive === void 0 ? true : _options$adaptive,\n _options$roundOffsets = options.roundOffsets,\n roundOffsets = _options$roundOffsets === void 0 ? true : _options$roundOffsets;\n var commonStyles = {\n placement: getBasePlacement(state.placement),\n variation: getVariation(state.placement),\n popper: state.elements.popper,\n popperRect: state.rects.popper,\n gpuAcceleration: gpuAcceleration,\n isFixed: state.options.strategy === 'fixed'\n };\n\n if (state.modifiersData.popperOffsets != null) {\n state.styles.popper = Object.assign({}, state.styles.popper, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.popperOffsets,\n position: state.options.strategy,\n adaptive: adaptive,\n roundOffsets: roundOffsets\n })));\n }\n\n if (state.modifiersData.arrow != null) {\n state.styles.arrow = Object.assign({}, state.styles.arrow, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.arrow,\n position: 'absolute',\n adaptive: false,\n roundOffsets: roundOffsets\n })));\n }\n\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-placement': state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'computeStyles',\n enabled: true,\n phase: 'beforeWrite',\n fn: computeStyles,\n data: {}\n};","import getWindow from \"../dom-utils/getWindow.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar passive = {\n passive: true\n};\n\nfunction effect(_ref) {\n var state = _ref.state,\n instance = _ref.instance,\n options = _ref.options;\n var _options$scroll = options.scroll,\n scroll = _options$scroll === void 0 ? true : _options$scroll,\n _options$resize = options.resize,\n resize = _options$resize === void 0 ? 
true : _options$resize;\n var window = getWindow(state.elements.popper);\n var scrollParents = [].concat(state.scrollParents.reference, state.scrollParents.popper);\n\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.addEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.addEventListener('resize', instance.update, passive);\n }\n\n return function () {\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.removeEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.removeEventListener('resize', instance.update, passive);\n }\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'eventListeners',\n enabled: true,\n phase: 'write',\n fn: function fn() {},\n effect: effect,\n data: {}\n};","var hash = {\n left: 'right',\n right: 'left',\n bottom: 'top',\n top: 'bottom'\n};\nexport default function getOppositePlacement(placement) {\n return placement.replace(/left|right|bottom|top/g, function (matched) {\n return hash[matched];\n });\n}","var hash = {\n start: 'end',\n end: 'start'\n};\nexport default function getOppositeVariationPlacement(placement) {\n return placement.replace(/start|end/g, function (matched) {\n return hash[matched];\n });\n}","import getWindow from \"./getWindow.js\";\nexport default function getWindowScroll(node) {\n var win = getWindow(node);\n var scrollLeft = win.pageXOffset;\n var scrollTop = win.pageYOffset;\n return {\n scrollLeft: scrollLeft,\n scrollTop: scrollTop\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nexport default function getWindowScrollBarX(element) {\n // If has a CSS width greater than the viewport, then this will be\n // incorrect for RTL.\n // Popper 1 is broken in this case and never had a bug report so let's assume\n // it's not an issue. I don't think anyone ever specifies width on \n // anyway.\n // Browsers where the left scrollbar doesn't cause an issue report `0` for\n // this (e.g. 
Edge 2019, IE11, Safari)\n return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;\n}","import getComputedStyle from \"./getComputedStyle.js\";\nexport default function isScrollParent(element) {\n // Firefox wants us to check `-x` and `-y` variations as well\n var _getComputedStyle = getComputedStyle(element),\n overflow = _getComputedStyle.overflow,\n overflowX = _getComputedStyle.overflowX,\n overflowY = _getComputedStyle.overflowY;\n\n return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);\n}","import getParentNode from \"./getParentNode.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nexport default function getScrollParent(node) {\n if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return node.ownerDocument.body;\n }\n\n if (isHTMLElement(node) && isScrollParent(node)) {\n return node;\n }\n\n return getScrollParent(getParentNode(node));\n}","import getScrollParent from \"./getScrollParent.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getWindow from \"./getWindow.js\";\nimport isScrollParent from \"./isScrollParent.js\";\n/*\ngiven a DOM element, return the list of all scroll parents, up the list of ancesors\nuntil we get to the top window object. This list is what we attach scroll listeners\nto, because if any of these parent elements scroll, we'll need to re-calculate the\nreference element's position.\n*/\n\nexport default function listScrollParents(element, list) {\n var _element$ownerDocumen;\n\n if (list === void 0) {\n list = [];\n }\n\n var scrollParent = getScrollParent(element);\n var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);\n var win = getWindow(scrollParent);\n var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;\n var updatedList = list.concat(target);\n return isBody ? 
updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here\n updatedList.concat(listScrollParents(getParentNode(target)));\n}","export default function rectToClientRect(rect) {\n return Object.assign({}, rect, {\n left: rect.x,\n top: rect.y,\n right: rect.x + rect.width,\n bottom: rect.y + rect.height\n });\n}","import { viewport } from \"../enums.js\";\nimport getViewportRect from \"./getViewportRect.js\";\nimport getDocumentRect from \"./getDocumentRect.js\";\nimport listScrollParents from \"./listScrollParents.js\";\nimport getOffsetParent from \"./getOffsetParent.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport contains from \"./contains.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport rectToClientRect from \"../utils/rectToClientRect.js\";\nimport { max, min } from \"../utils/math.js\";\n\nfunction getInnerBoundingClientRect(element, strategy) {\n var rect = getBoundingClientRect(element, false, strategy === 'fixed');\n rect.top = rect.top + element.clientTop;\n rect.left = rect.left + element.clientLeft;\n rect.bottom = rect.top + element.clientHeight;\n rect.right = rect.left + element.clientWidth;\n rect.width = element.clientWidth;\n rect.height = element.clientHeight;\n rect.x = rect.left;\n rect.y = rect.top;\n return rect;\n}\n\nfunction getClientRectFromMixedType(element, clippingParent, strategy) {\n return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));\n} // A \"clipping parent\" is an overflowable container with the characteristic of\n// clipping (or hiding) overflowing elements with a position different from\n// `initial`\n\n\nfunction getClippingParents(element) {\n var clippingParents = listScrollParents(getParentNode(element));\n var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;\n var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;\n\n if (!isElement(clipperElement)) {\n return [];\n } // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414\n\n\n return clippingParents.filter(function (clippingParent) {\n return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';\n });\n} // Gets the maximum area that the element is visible in due to any number of\n// clipping parents\n\n\nexport default function getClippingRect(element, boundary, rootBoundary, strategy) {\n var mainClippingParents = boundary === 'clippingParents' ? 
getClippingParents(element) : [].concat(boundary);\n var clippingParents = [].concat(mainClippingParents, [rootBoundary]);\n var firstClippingParent = clippingParents[0];\n var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {\n var rect = getClientRectFromMixedType(element, clippingParent, strategy);\n accRect.top = max(rect.top, accRect.top);\n accRect.right = min(rect.right, accRect.right);\n accRect.bottom = min(rect.bottom, accRect.bottom);\n accRect.left = max(rect.left, accRect.left);\n return accRect;\n }, getClientRectFromMixedType(element, firstClippingParent, strategy));\n clippingRect.width = clippingRect.right - clippingRect.left;\n clippingRect.height = clippingRect.bottom - clippingRect.top;\n clippingRect.x = clippingRect.left;\n clippingRect.y = clippingRect.top;\n return clippingRect;\n}","import getWindow from \"./getWindow.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getViewportRect(element, strategy) {\n var win = getWindow(element);\n var html = getDocumentElement(element);\n var visualViewport = win.visualViewport;\n var width = html.clientWidth;\n var height = html.clientHeight;\n var x = 0;\n var y = 0;\n\n if (visualViewport) {\n width = visualViewport.width;\n height = visualViewport.height;\n var layoutViewport = isLayoutViewport();\n\n if (layoutViewport || !layoutViewport && strategy === 'fixed') {\n x = visualViewport.offsetLeft;\n y = visualViewport.offsetTop;\n }\n }\n\n return {\n width: width,\n height: height,\n x: x + getWindowScrollBarX(element),\n y: y\n };\n}","import getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nimport { max } from \"../utils/math.js\"; // Gets the entire size of the scrollable document area, even extending outside\n// of the `` and `` rect bounds if horizontally scrollable\n\nexport default function getDocumentRect(element) {\n var _element$ownerDocumen;\n\n var html = getDocumentElement(element);\n var winScroll = getWindowScroll(element);\n var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;\n var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);\n var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);\n var x = -winScroll.scrollLeft + getWindowScrollBarX(element);\n var y = -winScroll.scrollTop;\n\n if (getComputedStyle(body || html).direction === 'rtl') {\n x += max(html.clientWidth, body ? body.clientWidth : 0) - width;\n }\n\n return {\n width: width,\n height: height,\n x: x,\n y: y\n };\n}","import getBasePlacement from \"./getBasePlacement.js\";\nimport getVariation from \"./getVariation.js\";\nimport getMainAxisFromPlacement from \"./getMainAxisFromPlacement.js\";\nimport { top, right, bottom, left, start, end } from \"../enums.js\";\nexport default function computeOffsets(_ref) {\n var reference = _ref.reference,\n element = _ref.element,\n placement = _ref.placement;\n var basePlacement = placement ? getBasePlacement(placement) : null;\n var variation = placement ? 
getVariation(placement) : null;\n var commonX = reference.x + reference.width / 2 - element.width / 2;\n var commonY = reference.y + reference.height / 2 - element.height / 2;\n var offsets;\n\n switch (basePlacement) {\n case top:\n offsets = {\n x: commonX,\n y: reference.y - element.height\n };\n break;\n\n case bottom:\n offsets = {\n x: commonX,\n y: reference.y + reference.height\n };\n break;\n\n case right:\n offsets = {\n x: reference.x + reference.width,\n y: commonY\n };\n break;\n\n case left:\n offsets = {\n x: reference.x - element.width,\n y: commonY\n };\n break;\n\n default:\n offsets = {\n x: reference.x,\n y: reference.y\n };\n }\n\n var mainAxis = basePlacement ? getMainAxisFromPlacement(basePlacement) : null;\n\n if (mainAxis != null) {\n var len = mainAxis === 'y' ? 'height' : 'width';\n\n switch (variation) {\n case start:\n offsets[mainAxis] = offsets[mainAxis] - (reference[len] / 2 - element[len] / 2);\n break;\n\n case end:\n offsets[mainAxis] = offsets[mainAxis] + (reference[len] / 2 - element[len] / 2);\n break;\n\n default:\n }\n }\n\n return offsets;\n}","import getClippingRect from \"../dom-utils/getClippingRect.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getBoundingClientRect from \"../dom-utils/getBoundingClientRect.js\";\nimport computeOffsets from \"./computeOffsets.js\";\nimport rectToClientRect from \"./rectToClientRect.js\";\nimport { clippingParents, reference, popper, bottom, top, right, basePlacements, viewport } from \"../enums.js\";\nimport { isElement } from \"../dom-utils/instanceOf.js\";\nimport mergePaddingObject from \"./mergePaddingObject.js\";\nimport expandToHashMap from \"./expandToHashMap.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport default function detectOverflow(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n _options$placement = _options.placement,\n placement = _options$placement === void 0 ? state.placement : _options$placement,\n _options$strategy = _options.strategy,\n strategy = _options$strategy === void 0 ? state.strategy : _options$strategy,\n _options$boundary = _options.boundary,\n boundary = _options$boundary === void 0 ? clippingParents : _options$boundary,\n _options$rootBoundary = _options.rootBoundary,\n rootBoundary = _options$rootBoundary === void 0 ? viewport : _options$rootBoundary,\n _options$elementConte = _options.elementContext,\n elementContext = _options$elementConte === void 0 ? popper : _options$elementConte,\n _options$altBoundary = _options.altBoundary,\n altBoundary = _options$altBoundary === void 0 ? false : _options$altBoundary,\n _options$padding = _options.padding,\n padding = _options$padding === void 0 ? 0 : _options$padding;\n var paddingObject = mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n var altContext = elementContext === popper ? reference : popper;\n var popperRect = state.rects.popper;\n var element = state.elements[altBoundary ? altContext : elementContext];\n var clippingClientRect = getClippingRect(isElement(element) ? 
element : element.contextElement || getDocumentElement(state.elements.popper), boundary, rootBoundary, strategy);\n var referenceClientRect = getBoundingClientRect(state.elements.reference);\n var popperOffsets = computeOffsets({\n reference: referenceClientRect,\n element: popperRect,\n strategy: 'absolute',\n placement: placement\n });\n var popperClientRect = rectToClientRect(Object.assign({}, popperRect, popperOffsets));\n var elementClientRect = elementContext === popper ? popperClientRect : referenceClientRect; // positive = overflowing the clipping rect\n // 0 or negative = within the clipping rect\n\n var overflowOffsets = {\n top: clippingClientRect.top - elementClientRect.top + paddingObject.top,\n bottom: elementClientRect.bottom - clippingClientRect.bottom + paddingObject.bottom,\n left: clippingClientRect.left - elementClientRect.left + paddingObject.left,\n right: elementClientRect.right - clippingClientRect.right + paddingObject.right\n };\n var offsetData = state.modifiersData.offset; // Offsets can be applied only to the popper element\n\n if (elementContext === popper && offsetData) {\n var offset = offsetData[placement];\n Object.keys(overflowOffsets).forEach(function (key) {\n var multiply = [right, bottom].indexOf(key) >= 0 ? 1 : -1;\n var axis = [top, bottom].indexOf(key) >= 0 ? 'y' : 'x';\n overflowOffsets[key] += offset[axis] * multiply;\n });\n }\n\n return overflowOffsets;\n}","import getOppositePlacement from \"../utils/getOppositePlacement.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getOppositeVariationPlacement from \"../utils/getOppositeVariationPlacement.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport computeAutoPlacement from \"../utils/computeAutoPlacement.js\";\nimport { bottom, top, start, right, left, auto } from \"../enums.js\";\nimport getVariation from \"../utils/getVariation.js\"; // eslint-disable-next-line import/no-unused-modules\n\nfunction getExpandedFallbackPlacements(placement) {\n if (getBasePlacement(placement) === auto) {\n return [];\n }\n\n var oppositePlacement = getOppositePlacement(placement);\n return [getOppositeVariationPlacement(placement), oppositePlacement, getOppositeVariationPlacement(oppositePlacement)];\n}\n\nfunction flip(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n\n if (state.modifiersData[name]._skip) {\n return;\n }\n\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? true : _options$altAxis,\n specifiedFallbackPlacements = options.fallbackPlacements,\n padding = options.padding,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n _options$flipVariatio = options.flipVariations,\n flipVariations = _options$flipVariatio === void 0 ? true : _options$flipVariatio,\n allowedAutoPlacements = options.allowedAutoPlacements;\n var preferredPlacement = state.options.placement;\n var basePlacement = getBasePlacement(preferredPlacement);\n var isBasePlacement = basePlacement === preferredPlacement;\n var fallbackPlacements = specifiedFallbackPlacements || (isBasePlacement || !flipVariations ? 
[getOppositePlacement(preferredPlacement)] : getExpandedFallbackPlacements(preferredPlacement));\n var placements = [preferredPlacement].concat(fallbackPlacements).reduce(function (acc, placement) {\n return acc.concat(getBasePlacement(placement) === auto ? computeAutoPlacement(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n flipVariations: flipVariations,\n allowedAutoPlacements: allowedAutoPlacements\n }) : placement);\n }, []);\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var checksMap = new Map();\n var makeFallbackChecks = true;\n var firstFittingPlacement = placements[0];\n\n for (var i = 0; i < placements.length; i++) {\n var placement = placements[i];\n\n var _basePlacement = getBasePlacement(placement);\n\n var isStartVariation = getVariation(placement) === start;\n var isVertical = [top, bottom].indexOf(_basePlacement) >= 0;\n var len = isVertical ? 'width' : 'height';\n var overflow = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n altBoundary: altBoundary,\n padding: padding\n });\n var mainVariationSide = isVertical ? isStartVariation ? right : left : isStartVariation ? bottom : top;\n\n if (referenceRect[len] > popperRect[len]) {\n mainVariationSide = getOppositePlacement(mainVariationSide);\n }\n\n var altVariationSide = getOppositePlacement(mainVariationSide);\n var checks = [];\n\n if (checkMainAxis) {\n checks.push(overflow[_basePlacement] <= 0);\n }\n\n if (checkAltAxis) {\n checks.push(overflow[mainVariationSide] <= 0, overflow[altVariationSide] <= 0);\n }\n\n if (checks.every(function (check) {\n return check;\n })) {\n firstFittingPlacement = placement;\n makeFallbackChecks = false;\n break;\n }\n\n checksMap.set(placement, checks);\n }\n\n if (makeFallbackChecks) {\n // `2` may be desired in some cases – research later\n var numberOfChecks = flipVariations ? 3 : 1;\n\n var _loop = function _loop(_i) {\n var fittingPlacement = placements.find(function (placement) {\n var checks = checksMap.get(placement);\n\n if (checks) {\n return checks.slice(0, _i).every(function (check) {\n return check;\n });\n }\n });\n\n if (fittingPlacement) {\n firstFittingPlacement = fittingPlacement;\n return \"break\";\n }\n };\n\n for (var _i = numberOfChecks; _i > 0; _i--) {\n var _ret = _loop(_i);\n\n if (_ret === \"break\") break;\n }\n }\n\n if (state.placement !== firstFittingPlacement) {\n state.modifiersData[name]._skip = true;\n state.placement = firstFittingPlacement;\n state.reset = true;\n }\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'flip',\n enabled: true,\n phase: 'main',\n fn: flip,\n requiresIfExists: ['offset'],\n data: {\n _skip: false\n }\n};","import getVariation from \"./getVariation.js\";\nimport { variationPlacements, basePlacements, placements as allPlacements } from \"../enums.js\";\nimport detectOverflow from \"./detectOverflow.js\";\nimport getBasePlacement from \"./getBasePlacement.js\";\nexport default function computeAutoPlacement(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n placement = _options.placement,\n boundary = _options.boundary,\n rootBoundary = _options.rootBoundary,\n padding = _options.padding,\n flipVariations = _options.flipVariations,\n _options$allowedAutoP = _options.allowedAutoPlacements,\n allowedAutoPlacements = _options$allowedAutoP === void 0 ? 
allPlacements : _options$allowedAutoP;\n var variation = getVariation(placement);\n var placements = variation ? flipVariations ? variationPlacements : variationPlacements.filter(function (placement) {\n return getVariation(placement) === variation;\n }) : basePlacements;\n var allowedPlacements = placements.filter(function (placement) {\n return allowedAutoPlacements.indexOf(placement) >= 0;\n });\n\n if (allowedPlacements.length === 0) {\n allowedPlacements = placements;\n } // $FlowFixMe[incompatible-type]: Flow seems to have problems with two array unions...\n\n\n var overflows = allowedPlacements.reduce(function (acc, placement) {\n acc[placement] = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding\n })[getBasePlacement(placement)];\n return acc;\n }, {});\n return Object.keys(overflows).sort(function (a, b) {\n return overflows[a] - overflows[b];\n });\n}","import { top, bottom, left, right } from \"../enums.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\n\nfunction getSideOffsets(overflow, rect, preventedOffsets) {\n if (preventedOffsets === void 0) {\n preventedOffsets = {\n x: 0,\n y: 0\n };\n }\n\n return {\n top: overflow.top - rect.height - preventedOffsets.y,\n right: overflow.right - rect.width + preventedOffsets.x,\n bottom: overflow.bottom - rect.height + preventedOffsets.y,\n left: overflow.left - rect.width - preventedOffsets.x\n };\n}\n\nfunction isAnySideFullyClipped(overflow) {\n return [top, right, bottom, left].some(function (side) {\n return overflow[side] >= 0;\n });\n}\n\nfunction hide(_ref) {\n var state = _ref.state,\n name = _ref.name;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var preventedOffsets = state.modifiersData.preventOverflow;\n var referenceOverflow = detectOverflow(state, {\n elementContext: 'reference'\n });\n var popperAltOverflow = detectOverflow(state, {\n altBoundary: true\n });\n var referenceClippingOffsets = getSideOffsets(referenceOverflow, referenceRect);\n var popperEscapeOffsets = getSideOffsets(popperAltOverflow, popperRect, preventedOffsets);\n var isReferenceHidden = isAnySideFullyClipped(referenceClippingOffsets);\n var hasPopperEscaped = isAnySideFullyClipped(popperEscapeOffsets);\n state.modifiersData[name] = {\n referenceClippingOffsets: referenceClippingOffsets,\n popperEscapeOffsets: popperEscapeOffsets,\n isReferenceHidden: isReferenceHidden,\n hasPopperEscaped: hasPopperEscaped\n };\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-reference-hidden': isReferenceHidden,\n 'data-popper-escaped': hasPopperEscaped\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'hide',\n enabled: true,\n phase: 'main',\n requiresIfExists: ['preventOverflow'],\n fn: hide\n};","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport { top, left, right, placements } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport function distanceAndSkiddingToXY(placement, rects, offset) {\n var basePlacement = getBasePlacement(placement);\n var invertDistance = [left, top].indexOf(basePlacement) >= 0 ? -1 : 1;\n\n var _ref = typeof offset === 'function' ? offset(Object.assign({}, rects, {\n placement: placement\n })) : offset,\n skidding = _ref[0],\n distance = _ref[1];\n\n skidding = skidding || 0;\n distance = (distance || 0) * invertDistance;\n return [left, right].indexOf(basePlacement) >= 0 ? 
{\n x: distance,\n y: skidding\n } : {\n x: skidding,\n y: distance\n };\n}\n\nfunction offset(_ref2) {\n var state = _ref2.state,\n options = _ref2.options,\n name = _ref2.name;\n var _options$offset = options.offset,\n offset = _options$offset === void 0 ? [0, 0] : _options$offset;\n var data = placements.reduce(function (acc, placement) {\n acc[placement] = distanceAndSkiddingToXY(placement, state.rects, offset);\n return acc;\n }, {});\n var _data$state$placement = data[state.placement],\n x = _data$state$placement.x,\n y = _data$state$placement.y;\n\n if (state.modifiersData.popperOffsets != null) {\n state.modifiersData.popperOffsets.x += x;\n state.modifiersData.popperOffsets.y += y;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'offset',\n enabled: true,\n phase: 'main',\n requires: ['popperOffsets'],\n fn: offset\n};","import computeOffsets from \"../utils/computeOffsets.js\";\n\nfunction popperOffsets(_ref) {\n var state = _ref.state,\n name = _ref.name;\n // Offsets are the actual position the popper needs to have to be\n // properly positioned near its reference element\n // This is the most basic placement, and will be adjusted by\n // the modifiers in the next step\n state.modifiersData[name] = computeOffsets({\n reference: state.rects.reference,\n element: state.rects.popper,\n strategy: 'absolute',\n placement: state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'popperOffsets',\n enabled: true,\n phase: 'read',\n fn: popperOffsets,\n data: {}\n};","import { top, left, right, bottom, start } from \"../enums.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport getAltAxis from \"../utils/getAltAxis.js\";\nimport { within, withinMaxClamp } from \"../utils/within.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport getFreshSideObject from \"../utils/getFreshSideObject.js\";\nimport { min as mathMin, max as mathMax } from \"../utils/math.js\";\n\nfunction preventOverflow(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? false : _options$altAxis,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n padding = options.padding,\n _options$tether = options.tether,\n tether = _options$tether === void 0 ? true : _options$tether,\n _options$tetherOffset = options.tetherOffset,\n tetherOffset = _options$tetherOffset === void 0 ? 
0 : _options$tetherOffset;\n var overflow = detectOverflow(state, {\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n altBoundary: altBoundary\n });\n var basePlacement = getBasePlacement(state.placement);\n var variation = getVariation(state.placement);\n var isBasePlacement = !variation;\n var mainAxis = getMainAxisFromPlacement(basePlacement);\n var altAxis = getAltAxis(mainAxis);\n var popperOffsets = state.modifiersData.popperOffsets;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var tetherOffsetValue = typeof tetherOffset === 'function' ? tetherOffset(Object.assign({}, state.rects, {\n placement: state.placement\n })) : tetherOffset;\n var normalizedTetherOffsetValue = typeof tetherOffsetValue === 'number' ? {\n mainAxis: tetherOffsetValue,\n altAxis: tetherOffsetValue\n } : Object.assign({\n mainAxis: 0,\n altAxis: 0\n }, tetherOffsetValue);\n var offsetModifierState = state.modifiersData.offset ? state.modifiersData.offset[state.placement] : null;\n var data = {\n x: 0,\n y: 0\n };\n\n if (!popperOffsets) {\n return;\n }\n\n if (checkMainAxis) {\n var _offsetModifierState$;\n\n var mainSide = mainAxis === 'y' ? top : left;\n var altSide = mainAxis === 'y' ? bottom : right;\n var len = mainAxis === 'y' ? 'height' : 'width';\n var offset = popperOffsets[mainAxis];\n var min = offset + overflow[mainSide];\n var max = offset - overflow[altSide];\n var additive = tether ? -popperRect[len] / 2 : 0;\n var minLen = variation === start ? referenceRect[len] : popperRect[len];\n var maxLen = variation === start ? -popperRect[len] : -referenceRect[len]; // We need to include the arrow in the calculation so the arrow doesn't go\n // outside the reference bounds\n\n var arrowElement = state.elements.arrow;\n var arrowRect = tether && arrowElement ? getLayoutRect(arrowElement) : {\n width: 0,\n height: 0\n };\n var arrowPaddingObject = state.modifiersData['arrow#persistent'] ? state.modifiersData['arrow#persistent'].padding : getFreshSideObject();\n var arrowPaddingMin = arrowPaddingObject[mainSide];\n var arrowPaddingMax = arrowPaddingObject[altSide]; // If the reference length is smaller than the arrow length, we don't want\n // to include its full size in the calculation. If the reference is small\n // and near the edge of a boundary, the popper can overflow even if the\n // reference is not overflowing as well (e.g. virtual elements with no\n // width or height)\n\n var arrowLen = within(0, referenceRect[len], arrowRect[len]);\n var minOffset = isBasePlacement ? referenceRect[len] / 2 - additive - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis : minLen - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis;\n var maxOffset = isBasePlacement ? -referenceRect[len] / 2 + additive + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis : maxLen + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis;\n var arrowOffsetParent = state.elements.arrow && getOffsetParent(state.elements.arrow);\n var clientOffset = arrowOffsetParent ? mainAxis === 'y' ? arrowOffsetParent.clientTop || 0 : arrowOffsetParent.clientLeft || 0 : 0;\n var offsetModifierValue = (_offsetModifierState$ = offsetModifierState == null ? void 0 : offsetModifierState[mainAxis]) != null ? _offsetModifierState$ : 0;\n var tetherMin = offset + minOffset - offsetModifierValue - clientOffset;\n var tetherMax = offset + maxOffset - offsetModifierValue;\n var preventedOffset = within(tether ? 
mathMin(min, tetherMin) : min, offset, tether ? mathMax(max, tetherMax) : max);\n popperOffsets[mainAxis] = preventedOffset;\n data[mainAxis] = preventedOffset - offset;\n }\n\n if (checkAltAxis) {\n var _offsetModifierState$2;\n\n var _mainSide = mainAxis === 'x' ? top : left;\n\n var _altSide = mainAxis === 'x' ? bottom : right;\n\n var _offset = popperOffsets[altAxis];\n\n var _len = altAxis === 'y' ? 'height' : 'width';\n\n var _min = _offset + overflow[_mainSide];\n\n var _max = _offset - overflow[_altSide];\n\n var isOriginSide = [top, left].indexOf(basePlacement) !== -1;\n\n var _offsetModifierValue = (_offsetModifierState$2 = offsetModifierState == null ? void 0 : offsetModifierState[altAxis]) != null ? _offsetModifierState$2 : 0;\n\n var _tetherMin = isOriginSide ? _min : _offset - referenceRect[_len] - popperRect[_len] - _offsetModifierValue + normalizedTetherOffsetValue.altAxis;\n\n var _tetherMax = isOriginSide ? _offset + referenceRect[_len] + popperRect[_len] - _offsetModifierValue - normalizedTetherOffsetValue.altAxis : _max;\n\n var _preventedOffset = tether && isOriginSide ? withinMaxClamp(_tetherMin, _offset, _tetherMax) : within(tether ? _tetherMin : _min, _offset, tether ? _tetherMax : _max);\n\n popperOffsets[altAxis] = _preventedOffset;\n data[altAxis] = _preventedOffset - _offset;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'preventOverflow',\n enabled: true,\n phase: 'main',\n fn: preventOverflow,\n requiresIfExists: ['offset']\n};","export default function getAltAxis(axis) {\n return axis === 'x' ? 'y' : 'x';\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getNodeScroll from \"./getNodeScroll.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport { round } from \"../utils/math.js\";\n\nfunction isElementScaled(element) {\n var rect = element.getBoundingClientRect();\n var scaleX = round(rect.width) / element.offsetWidth || 1;\n var scaleY = round(rect.height) / element.offsetHeight || 1;\n return scaleX !== 1 || scaleY !== 1;\n} // Returns the composite rect of an element relative to its offsetParent.\n// Composite means it takes into account transforms as well as layout.\n\n\nexport default function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {\n if (isFixed === void 0) {\n isFixed = false;\n }\n\n var isOffsetParentAnElement = isHTMLElement(offsetParent);\n var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);\n var documentElement = getDocumentElement(offsetParent);\n var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);\n var scroll = {\n scrollLeft: 0,\n scrollTop: 0\n };\n var offsets = {\n x: 0,\n y: 0\n };\n\n if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {\n if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078\n isScrollParent(documentElement)) {\n scroll = getNodeScroll(offsetParent);\n }\n\n if (isHTMLElement(offsetParent)) {\n offsets = getBoundingClientRect(offsetParent, true);\n offsets.x += offsetParent.clientLeft;\n offsets.y += offsetParent.clientTop;\n } else if (documentElement) {\n offsets.x = getWindowScrollBarX(documentElement);\n }\n }\n\n 
return {\n x: rect.left + scroll.scrollLeft - offsets.x,\n y: rect.top + scroll.scrollTop - offsets.y,\n width: rect.width,\n height: rect.height\n };\n}","import getWindowScroll from \"./getWindowScroll.js\";\nimport getWindow from \"./getWindow.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getHTMLElementScroll from \"./getHTMLElementScroll.js\";\nexport default function getNodeScroll(node) {\n if (node === getWindow(node) || !isHTMLElement(node)) {\n return getWindowScroll(node);\n } else {\n return getHTMLElementScroll(node);\n }\n}","export default function getHTMLElementScroll(element) {\n return {\n scrollLeft: element.scrollLeft,\n scrollTop: element.scrollTop\n };\n}","import { modifierPhases } from \"../enums.js\"; // source: https://stackoverflow.com/questions/49875255\n\nfunction order(modifiers) {\n var map = new Map();\n var visited = new Set();\n var result = [];\n modifiers.forEach(function (modifier) {\n map.set(modifier.name, modifier);\n }); // On visiting object, check for its dependencies and visit them recursively\n\n function sort(modifier) {\n visited.add(modifier.name);\n var requires = [].concat(modifier.requires || [], modifier.requiresIfExists || []);\n requires.forEach(function (dep) {\n if (!visited.has(dep)) {\n var depModifier = map.get(dep);\n\n if (depModifier) {\n sort(depModifier);\n }\n }\n });\n result.push(modifier);\n }\n\n modifiers.forEach(function (modifier) {\n if (!visited.has(modifier.name)) {\n // check for visited object\n sort(modifier);\n }\n });\n return result;\n}\n\nexport default function orderModifiers(modifiers) {\n // order based on dependencies\n var orderedModifiers = order(modifiers); // order based on phase\n\n return modifierPhases.reduce(function (acc, phase) {\n return acc.concat(orderedModifiers.filter(function (modifier) {\n return modifier.phase === phase;\n }));\n }, []);\n}","import getCompositeRect from \"./dom-utils/getCompositeRect.js\";\nimport getLayoutRect from \"./dom-utils/getLayoutRect.js\";\nimport listScrollParents from \"./dom-utils/listScrollParents.js\";\nimport getOffsetParent from \"./dom-utils/getOffsetParent.js\";\nimport orderModifiers from \"./utils/orderModifiers.js\";\nimport debounce from \"./utils/debounce.js\";\nimport mergeByName from \"./utils/mergeByName.js\";\nimport detectOverflow from \"./utils/detectOverflow.js\";\nimport { isElement } from \"./dom-utils/instanceOf.js\";\nvar DEFAULT_OPTIONS = {\n placement: 'bottom',\n modifiers: [],\n strategy: 'absolute'\n};\n\nfunction areValidElements() {\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n\n return !args.some(function (element) {\n return !(element && typeof element.getBoundingClientRect === 'function');\n });\n}\n\nexport function popperGenerator(generatorOptions) {\n if (generatorOptions === void 0) {\n generatorOptions = {};\n }\n\n var _generatorOptions = generatorOptions,\n _generatorOptions$def = _generatorOptions.defaultModifiers,\n defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,\n _generatorOptions$def2 = _generatorOptions.defaultOptions,\n defaultOptions = _generatorOptions$def2 === void 0 ? 
DEFAULT_OPTIONS : _generatorOptions$def2;\n return function createPopper(reference, popper, options) {\n if (options === void 0) {\n options = defaultOptions;\n }\n\n var state = {\n placement: 'bottom',\n orderedModifiers: [],\n options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),\n modifiersData: {},\n elements: {\n reference: reference,\n popper: popper\n },\n attributes: {},\n styles: {}\n };\n var effectCleanupFns = [];\n var isDestroyed = false;\n var instance = {\n state: state,\n setOptions: function setOptions(setOptionsAction) {\n var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;\n cleanupModifierEffects();\n state.options = Object.assign({}, defaultOptions, state.options, options);\n state.scrollParents = {\n reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],\n popper: listScrollParents(popper)\n }; // Orders the modifiers based on their dependencies and `phase`\n // properties\n\n var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers\n\n state.orderedModifiers = orderedModifiers.filter(function (m) {\n return m.enabled;\n });\n runModifierEffects();\n return instance.update();\n },\n // Sync update – it will always be executed, even if not necessary. This\n // is useful for low frequency updates where sync behavior simplifies the\n // logic.\n // For high frequency updates (e.g. `resize` and `scroll` events), always\n // prefer the async Popper#update method\n forceUpdate: function forceUpdate() {\n if (isDestroyed) {\n return;\n }\n\n var _state$elements = state.elements,\n reference = _state$elements.reference,\n popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements\n // anymore\n\n if (!areValidElements(reference, popper)) {\n return;\n } // Store the reference and popper rects to be read by modifiers\n\n\n state.rects = {\n reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),\n popper: getLayoutRect(popper)\n }; // Modifiers have the ability to reset the current update cycle. The\n // most common use case for this is the `flip` modifier changing the\n // placement, which then needs to re-run all the modifiers, because the\n // logic was previously ran for the previous placement and is therefore\n // stale/incorrect\n\n state.reset = false;\n state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier\n // is filled with the initial data specified by the modifier. This means\n // it doesn't persist and is fresh on each update.\n // To ensure persistent data, use `${name}#persistent`\n\n state.orderedModifiers.forEach(function (modifier) {\n return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);\n });\n\n for (var index = 0; index < state.orderedModifiers.length; index++) {\n if (state.reset === true) {\n state.reset = false;\n index = -1;\n continue;\n }\n\n var _state$orderedModifie = state.orderedModifiers[index],\n fn = _state$orderedModifie.fn,\n _state$orderedModifie2 = _state$orderedModifie.options,\n _options = _state$orderedModifie2 === void 0 ? 
{} : _state$orderedModifie2,\n name = _state$orderedModifie.name;\n\n if (typeof fn === 'function') {\n state = fn({\n state: state,\n options: _options,\n name: name,\n instance: instance\n }) || state;\n }\n }\n },\n // Async and optimistically optimized update – it will not be executed if\n // not necessary (debounced to run at most once-per-tick)\n update: debounce(function () {\n return new Promise(function (resolve) {\n instance.forceUpdate();\n resolve(state);\n });\n }),\n destroy: function destroy() {\n cleanupModifierEffects();\n isDestroyed = true;\n }\n };\n\n if (!areValidElements(reference, popper)) {\n return instance;\n }\n\n instance.setOptions(options).then(function (state) {\n if (!isDestroyed && options.onFirstUpdate) {\n options.onFirstUpdate(state);\n }\n }); // Modifiers have the ability to execute arbitrary code before the first\n // update cycle runs. They will be executed in the same order as the update\n // cycle. This is useful when a modifier adds some persistent data that\n // other modifiers need to use, but the modifier is run after the dependent\n // one.\n\n function runModifierEffects() {\n state.orderedModifiers.forEach(function (_ref) {\n var name = _ref.name,\n _ref$options = _ref.options,\n options = _ref$options === void 0 ? {} : _ref$options,\n effect = _ref.effect;\n\n if (typeof effect === 'function') {\n var cleanupFn = effect({\n state: state,\n name: name,\n instance: instance,\n options: options\n });\n\n var noopFn = function noopFn() {};\n\n effectCleanupFns.push(cleanupFn || noopFn);\n }\n });\n }\n\n function cleanupModifierEffects() {\n effectCleanupFns.forEach(function (fn) {\n return fn();\n });\n effectCleanupFns = [];\n }\n\n return instance;\n };\n}\nexport var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules\n\nexport { detectOverflow };","export default function debounce(fn) {\n var pending;\n return function () {\n if (!pending) {\n pending = new Promise(function (resolve) {\n Promise.resolve().then(function () {\n pending = undefined;\n resolve(fn());\n });\n });\n }\n\n return pending;\n };\n}","export default function mergeByName(modifiers) {\n var merged = modifiers.reduce(function (merged, current) {\n var existing = merged[current.name];\n merged[current.name] = existing ? 
Object.assign({}, existing, current, {\n options: Object.assign({}, existing.options, current.options),\n data: Object.assign({}, existing.data, current.data)\n }) : current;\n return merged;\n }, {}); // IE11 does not support Object.values\n\n return Object.keys(merged).map(function (key) {\n return merged[key];\n });\n}","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nimport offset from \"./modifiers/offset.js\";\nimport flip from \"./modifiers/flip.js\";\nimport preventOverflow from \"./modifiers/preventOverflow.js\";\nimport arrow from \"./modifiers/arrow.js\";\nimport hide from \"./modifiers/hide.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip, preventOverflow, arrow, hide];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow }; // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper as createPopperLite } from \"./popper-lite.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport * from \"./modifiers/index.js\";","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow };","/*!\n * Bootstrap v5.3.3 (https://getbootstrap.com/)\n * Copyright 2011-2024 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n */\nimport * as Popper from '@popperjs/core';\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/data.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n/**\n * Constants\n */\n\nconst elementMap = new Map();\nconst Data = {\n set(element, key, instance) {\n if (!elementMap.has(element)) {\n elementMap.set(element, new Map());\n }\n const instanceMap = elementMap.get(element);\n\n // make it clear we only want one instance per element\n // can be removed later when multiple key/instances are fine to be used\n if (!instanceMap.has(key) && instanceMap.size !== 0) {\n // eslint-disable-next-line no-console\n console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(instanceMap.keys())[0]}.`);\n return;\n }\n instanceMap.set(key, instance);\n },\n get(element, key) {\n if (elementMap.has(element)) {\n return elementMap.get(element).get(key) || null;\n }\n return null;\n },\n remove(element, key) {\n if (!elementMap.has(element)) {\n return;\n }\n const instanceMap = elementMap.get(element);\n instanceMap.delete(key);\n\n // free up element references if there are no instances left for an element\n if (instanceMap.size === 0) {\n elementMap.delete(element);\n }\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/index.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst MAX_UID = 1000000;\nconst MILLISECONDS_MULTIPLIER = 1000;\nconst TRANSITION_END = 'transitionend';\n\n/**\n * Properly escape IDs selectors to handle weird IDs\n * @param {string} selector\n * @returns {string}\n */\nconst parseSelector = selector => {\n if (selector && window.CSS && window.CSS.escape) {\n // document.querySelector needs escaping to handle IDs (html5+) containing for instance /\n selector = selector.replace(/#([^\\s\"#']+)/g, (match, id) => `#${CSS.escape(id)}`);\n }\n return selector;\n};\n\n// Shout-out Angus Croll (https://goo.gl/pxwQGp)\nconst toType = object => {\n if (object === null || object === undefined) {\n return `${object}`;\n }\n return Object.prototype.toString.call(object).match(/\\s([a-z]+)/i)[1].toLowerCase();\n};\n\n/**\n * Public Util API\n */\n\nconst getUID = prefix => {\n do {\n prefix += Math.floor(Math.random() * MAX_UID);\n } while (document.getElementById(prefix));\n return prefix;\n};\nconst getTransitionDurationFromElement = element => {\n if (!element) {\n return 0;\n }\n\n // Get transition-duration of the element\n let {\n transitionDuration,\n transitionDelay\n } = window.getComputedStyle(element);\n const floatTransitionDuration = Number.parseFloat(transitionDuration);\n const floatTransitionDelay = Number.parseFloat(transitionDelay);\n\n // Return 0 if element or transition duration is not found\n if (!floatTransitionDuration && !floatTransitionDelay) {\n return 0;\n }\n\n // If multiple durations are defined, take the first\n transitionDuration = transitionDuration.split(',')[0];\n transitionDelay = transitionDelay.split(',')[0];\n return (Number.parseFloat(transitionDuration) + Number.parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;\n};\nconst triggerTransitionEnd = element => {\n element.dispatchEvent(new Event(TRANSITION_END));\n};\nconst isElement = object => {\n if (!object || typeof object !== 'object') {\n return false;\n }\n if (typeof object.jquery !== 'undefined') {\n object = object[0];\n }\n return typeof object.nodeType !== 'undefined';\n};\nconst getElement = object => {\n // it's a jQuery object or a node element\n if (isElement(object)) {\n return object.jquery ? 
object[0] : object;\n }\n if (typeof object === 'string' && object.length > 0) {\n return document.querySelector(parseSelector(object));\n }\n return null;\n};\nconst isVisible = element => {\n if (!isElement(element) || element.getClientRects().length === 0) {\n return false;\n }\n const elementIsVisible = getComputedStyle(element).getPropertyValue('visibility') === 'visible';\n // Handle `details` element as its content may falsie appear visible when it is closed\n const closedDetails = element.closest('details:not([open])');\n if (!closedDetails) {\n return elementIsVisible;\n }\n if (closedDetails !== element) {\n const summary = element.closest('summary');\n if (summary && summary.parentNode !== closedDetails) {\n return false;\n }\n if (summary === null) {\n return false;\n }\n }\n return elementIsVisible;\n};\nconst isDisabled = element => {\n if (!element || element.nodeType !== Node.ELEMENT_NODE) {\n return true;\n }\n if (element.classList.contains('disabled')) {\n return true;\n }\n if (typeof element.disabled !== 'undefined') {\n return element.disabled;\n }\n return element.hasAttribute('disabled') && element.getAttribute('disabled') !== 'false';\n};\nconst findShadowRoot = element => {\n if (!document.documentElement.attachShadow) {\n return null;\n }\n\n // Can find the shadow root otherwise it'll return the document\n if (typeof element.getRootNode === 'function') {\n const root = element.getRootNode();\n return root instanceof ShadowRoot ? root : null;\n }\n if (element instanceof ShadowRoot) {\n return element;\n }\n\n // when we don't find a shadow root\n if (!element.parentNode) {\n return null;\n }\n return findShadowRoot(element.parentNode);\n};\nconst noop = () => {};\n\n/**\n * Trick to restart an element's animation\n *\n * @param {HTMLElement} element\n * @return void\n *\n * @see https://www.charistheo.io/blog/2021/02/restart-a-css-animation-with-javascript/#restarting-a-css-animation\n */\nconst reflow = element => {\n element.offsetHeight; // eslint-disable-line no-unused-expressions\n};\nconst getjQuery = () => {\n if (window.jQuery && !document.body.hasAttribute('data-bs-no-jquery')) {\n return window.jQuery;\n }\n return null;\n};\nconst DOMContentLoadedCallbacks = [];\nconst onDOMContentLoaded = callback => {\n if (document.readyState === 'loading') {\n // add listener on the first call when the document is in loading state\n if (!DOMContentLoadedCallbacks.length) {\n document.addEventListener('DOMContentLoaded', () => {\n for (const callback of DOMContentLoadedCallbacks) {\n callback();\n }\n });\n }\n DOMContentLoadedCallbacks.push(callback);\n } else {\n callback();\n }\n};\nconst isRTL = () => document.documentElement.dir === 'rtl';\nconst defineJQueryPlugin = plugin => {\n onDOMContentLoaded(() => {\n const $ = getjQuery();\n /* istanbul ignore if */\n if ($) {\n const name = plugin.NAME;\n const JQUERY_NO_CONFLICT = $.fn[name];\n $.fn[name] = plugin.jQueryInterface;\n $.fn[name].Constructor = plugin;\n $.fn[name].noConflict = () => {\n $.fn[name] = JQUERY_NO_CONFLICT;\n return plugin.jQueryInterface;\n };\n }\n });\n};\nconst execute = (possibleCallback, args = [], defaultValue = possibleCallback) => {\n return typeof possibleCallback === 'function' ? 
possibleCallback(...args) : defaultValue;\n};\nconst executeAfterTransition = (callback, transitionElement, waitForTransition = true) => {\n if (!waitForTransition) {\n execute(callback);\n return;\n }\n const durationPadding = 5;\n const emulatedDuration = getTransitionDurationFromElement(transitionElement) + durationPadding;\n let called = false;\n const handler = ({\n target\n }) => {\n if (target !== transitionElement) {\n return;\n }\n called = true;\n transitionElement.removeEventListener(TRANSITION_END, handler);\n execute(callback);\n };\n transitionElement.addEventListener(TRANSITION_END, handler);\n setTimeout(() => {\n if (!called) {\n triggerTransitionEnd(transitionElement);\n }\n }, emulatedDuration);\n};\n\n/**\n * Return the previous/next element of a list.\n *\n * @param {array} list The list of elements\n * @param activeElement The active element\n * @param shouldGetNext Choose to get next or previous element\n * @param isCycleAllowed\n * @return {Element|elem} The proper element\n */\nconst getNextActiveElement = (list, activeElement, shouldGetNext, isCycleAllowed) => {\n const listLength = list.length;\n let index = list.indexOf(activeElement);\n\n // if the element does not exist in the list return an element\n // depending on the direction and if cycle is allowed\n if (index === -1) {\n return !shouldGetNext && isCycleAllowed ? list[listLength - 1] : list[0];\n }\n index += shouldGetNext ? 1 : -1;\n if (isCycleAllowed) {\n index = (index + listLength) % listLength;\n }\n return list[Math.max(0, Math.min(index, listLength - 1))];\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/event-handler.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst namespaceRegex = /[^.]*(?=\\..*)\\.|.*/;\nconst stripNameRegex = /\\..*/;\nconst stripUidRegex = /::\\d+$/;\nconst eventRegistry = {}; // Events storage\nlet uidEvent = 1;\nconst customEvents = {\n mouseenter: 'mouseover',\n mouseleave: 'mouseout'\n};\nconst nativeEvents = new Set(['click', 'dblclick', 'mouseup', 'mousedown', 'contextmenu', 'mousewheel', 'DOMMouseScroll', 'mouseover', 'mouseout', 'mousemove', 'selectstart', 'selectend', 'keydown', 'keypress', 'keyup', 'orientationchange', 'touchstart', 'touchmove', 'touchend', 'touchcancel', 'pointerdown', 'pointermove', 'pointerup', 'pointerleave', 'pointercancel', 'gesturestart', 'gesturechange', 'gestureend', 'focus', 'blur', 'change', 'reset', 'select', 'submit', 'focusin', 'focusout', 'load', 'unload', 'beforeunload', 'resize', 'move', 'DOMContentLoaded', 'readystatechange', 'error', 'abort', 'scroll']);\n\n/**\n * Private methods\n */\n\nfunction makeEventUid(element, uid) {\n return uid && `${uid}::${uidEvent++}` || element.uidEvent || uidEvent++;\n}\nfunction getElementEvents(element) {\n const uid = makeEventUid(element);\n element.uidEvent = uid;\n eventRegistry[uid] = eventRegistry[uid] || {};\n return eventRegistry[uid];\n}\nfunction bootstrapHandler(element, fn) {\n return function handler(event) {\n hydrateObj(event, {\n delegateTarget: element\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, fn);\n }\n return fn.apply(element, [event]);\n };\n}\nfunction bootstrapDelegationHandler(element, selector, fn) {\n return function handler(event) {\n const domElements = element.querySelectorAll(selector);\n for (let {\n target\n } = event; target && 
target !== this; target = target.parentNode) {\n for (const domElement of domElements) {\n if (domElement !== target) {\n continue;\n }\n hydrateObj(event, {\n delegateTarget: target\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, selector, fn);\n }\n return fn.apply(target, [event]);\n }\n }\n };\n}\nfunction findHandler(events, callable, delegationSelector = null) {\n return Object.values(events).find(event => event.callable === callable && event.delegationSelector === delegationSelector);\n}\nfunction normalizeParameters(originalTypeEvent, handler, delegationFunction) {\n const isDelegated = typeof handler === 'string';\n // TODO: tooltip passes `false` instead of selector, so we need to check\n const callable = isDelegated ? delegationFunction : handler || delegationFunction;\n let typeEvent = getTypeEvent(originalTypeEvent);\n if (!nativeEvents.has(typeEvent)) {\n typeEvent = originalTypeEvent;\n }\n return [isDelegated, callable, typeEvent];\n}\nfunction addHandler(element, originalTypeEvent, handler, delegationFunction, oneOff) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n let [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n\n // in case of mouseenter or mouseleave wrap the handler within a function that checks for its DOM position\n // this prevents the handler from being dispatched the same way as mouseover or mouseout does\n if (originalTypeEvent in customEvents) {\n const wrapFunction = fn => {\n return function (event) {\n if (!event.relatedTarget || event.relatedTarget !== event.delegateTarget && !event.delegateTarget.contains(event.relatedTarget)) {\n return fn.call(this, event);\n }\n };\n };\n callable = wrapFunction(callable);\n }\n const events = getElementEvents(element);\n const handlers = events[typeEvent] || (events[typeEvent] = {});\n const previousFunction = findHandler(handlers, callable, isDelegated ? handler : null);\n if (previousFunction) {\n previousFunction.oneOff = previousFunction.oneOff && oneOff;\n return;\n }\n const uid = makeEventUid(callable, originalTypeEvent.replace(namespaceRegex, ''));\n const fn = isDelegated ? bootstrapDelegationHandler(element, handler, callable) : bootstrapHandler(element, callable);\n fn.delegationSelector = isDelegated ? 
handler : null;\n fn.callable = callable;\n fn.oneOff = oneOff;\n fn.uidEvent = uid;\n handlers[uid] = fn;\n element.addEventListener(typeEvent, fn, isDelegated);\n}\nfunction removeHandler(element, events, typeEvent, handler, delegationSelector) {\n const fn = findHandler(events[typeEvent], handler, delegationSelector);\n if (!fn) {\n return;\n }\n element.removeEventListener(typeEvent, fn, Boolean(delegationSelector));\n delete events[typeEvent][fn.uidEvent];\n}\nfunction removeNamespacedHandlers(element, events, typeEvent, namespace) {\n const storeElementEvent = events[typeEvent] || {};\n for (const [handlerKey, event] of Object.entries(storeElementEvent)) {\n if (handlerKey.includes(namespace)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n}\nfunction getTypeEvent(event) {\n // allow to get the native events from namespaced events ('click.bs.button' --> 'click')\n event = event.replace(stripNameRegex, '');\n return customEvents[event] || event;\n}\nconst EventHandler = {\n on(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, false);\n },\n one(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, true);\n },\n off(element, originalTypeEvent, handler, delegationFunction) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n const [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n const inNamespace = typeEvent !== originalTypeEvent;\n const events = getElementEvents(element);\n const storeElementEvent = events[typeEvent] || {};\n const isNamespace = originalTypeEvent.startsWith('.');\n if (typeof callable !== 'undefined') {\n // Simplest case: handler is passed, remove that listener ONLY.\n if (!Object.keys(storeElementEvent).length) {\n return;\n }\n removeHandler(element, events, typeEvent, callable, isDelegated ? 
handler : null);\n return;\n }\n if (isNamespace) {\n for (const elementEvent of Object.keys(events)) {\n removeNamespacedHandlers(element, events, elementEvent, originalTypeEvent.slice(1));\n }\n }\n for (const [keyHandlers, event] of Object.entries(storeElementEvent)) {\n const handlerKey = keyHandlers.replace(stripUidRegex, '');\n if (!inNamespace || originalTypeEvent.includes(handlerKey)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n },\n trigger(element, event, args) {\n if (typeof event !== 'string' || !element) {\n return null;\n }\n const $ = getjQuery();\n const typeEvent = getTypeEvent(event);\n const inNamespace = event !== typeEvent;\n let jQueryEvent = null;\n let bubbles = true;\n let nativeDispatch = true;\n let defaultPrevented = false;\n if (inNamespace && $) {\n jQueryEvent = $.Event(event, args);\n $(element).trigger(jQueryEvent);\n bubbles = !jQueryEvent.isPropagationStopped();\n nativeDispatch = !jQueryEvent.isImmediatePropagationStopped();\n defaultPrevented = jQueryEvent.isDefaultPrevented();\n }\n const evt = hydrateObj(new Event(event, {\n bubbles,\n cancelable: true\n }), args);\n if (defaultPrevented) {\n evt.preventDefault();\n }\n if (nativeDispatch) {\n element.dispatchEvent(evt);\n }\n if (evt.defaultPrevented && jQueryEvent) {\n jQueryEvent.preventDefault();\n }\n return evt;\n }\n};\nfunction hydrateObj(obj, meta = {}) {\n for (const [key, value] of Object.entries(meta)) {\n try {\n obj[key] = value;\n } catch (_unused) {\n Object.defineProperty(obj, key, {\n configurable: true,\n get() {\n return value;\n }\n });\n }\n }\n return obj;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/manipulator.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nfunction normalizeData(value) {\n if (value === 'true') {\n return true;\n }\n if (value === 'false') {\n return false;\n }\n if (value === Number(value).toString()) {\n return Number(value);\n }\n if (value === '' || value === 'null') {\n return null;\n }\n if (typeof value !== 'string') {\n return value;\n }\n try {\n return JSON.parse(decodeURIComponent(value));\n } catch (_unused) {\n return value;\n }\n}\nfunction normalizeDataKey(key) {\n return key.replace(/[A-Z]/g, chr => `-${chr.toLowerCase()}`);\n}\nconst Manipulator = {\n setDataAttribute(element, key, value) {\n element.setAttribute(`data-bs-${normalizeDataKey(key)}`, value);\n },\n removeDataAttribute(element, key) {\n element.removeAttribute(`data-bs-${normalizeDataKey(key)}`);\n },\n getDataAttributes(element) {\n if (!element) {\n return {};\n }\n const attributes = {};\n const bsKeys = Object.keys(element.dataset).filter(key => key.startsWith('bs') && !key.startsWith('bsConfig'));\n for (const key of bsKeys) {\n let pureKey = key.replace(/^bs/, '');\n pureKey = pureKey.charAt(0).toLowerCase() + pureKey.slice(1, pureKey.length);\n attributes[pureKey] = normalizeData(element.dataset[key]);\n }\n return attributes;\n },\n getDataAttribute(element, key) {\n return normalizeData(element.getAttribute(`data-bs-${normalizeDataKey(key)}`));\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/config.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * 
Class definition\n */\n\nclass Config {\n // Getters\n static get Default() {\n return {};\n }\n static get DefaultType() {\n return {};\n }\n static get NAME() {\n throw new Error('You have to implement the static method \"NAME\", for each component!');\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n return config;\n }\n _mergeConfigObj(config, element) {\n const jsonConfig = isElement(element) ? Manipulator.getDataAttribute(element, 'config') : {}; // try to parse\n\n return {\n ...this.constructor.Default,\n ...(typeof jsonConfig === 'object' ? jsonConfig : {}),\n ...(isElement(element) ? Manipulator.getDataAttributes(element) : {}),\n ...(typeof config === 'object' ? config : {})\n };\n }\n _typeCheckConfig(config, configTypes = this.constructor.DefaultType) {\n for (const [property, expectedTypes] of Object.entries(configTypes)) {\n const value = config[property];\n const valueType = isElement(value) ? 'element' : toType(value);\n if (!new RegExp(expectedTypes).test(valueType)) {\n throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option \"${property}\" provided type \"${valueType}\" but expected type \"${expectedTypes}\".`);\n }\n }\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap base-component.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst VERSION = '5.3.3';\n\n/**\n * Class definition\n */\n\nclass BaseComponent extends Config {\n constructor(element, config) {\n super();\n element = getElement(element);\n if (!element) {\n return;\n }\n this._element = element;\n this._config = this._getConfig(config);\n Data.set(this._element, this.constructor.DATA_KEY, this);\n }\n\n // Public\n dispose() {\n Data.remove(this._element, this.constructor.DATA_KEY);\n EventHandler.off(this._element, this.constructor.EVENT_KEY);\n for (const propertyName of Object.getOwnPropertyNames(this)) {\n this[propertyName] = null;\n }\n }\n _queueCallback(callback, element, isAnimated = true) {\n executeAfterTransition(callback, element, isAnimated);\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config, this._element);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n\n // Static\n static getInstance(element) {\n return Data.get(getElement(element), this.DATA_KEY);\n }\n static getOrCreateInstance(element, config = {}) {\n return this.getInstance(element) || new this(element, typeof config === 'object' ? 
config : null);\n }\n static get VERSION() {\n return VERSION;\n }\n static get DATA_KEY() {\n return `bs.${this.NAME}`;\n }\n static get EVENT_KEY() {\n return `.${this.DATA_KEY}`;\n }\n static eventName(name) {\n return `${name}${this.EVENT_KEY}`;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/selector-engine.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst getSelector = element => {\n let selector = element.getAttribute('data-bs-target');\n if (!selector || selector === '#') {\n let hrefAttribute = element.getAttribute('href');\n\n // The only valid content that could double as a selector are IDs or classes,\n // so everything starting with `#` or `.`. If a \"real\" URL is used as the selector,\n // `document.querySelector` will rightfully complain it is invalid.\n // See https://github.com/twbs/bootstrap/issues/32273\n if (!hrefAttribute || !hrefAttribute.includes('#') && !hrefAttribute.startsWith('.')) {\n return null;\n }\n\n // Just in case some CMS puts out a full URL with the anchor appended\n if (hrefAttribute.includes('#') && !hrefAttribute.startsWith('#')) {\n hrefAttribute = `#${hrefAttribute.split('#')[1]}`;\n }\n selector = hrefAttribute && hrefAttribute !== '#' ? hrefAttribute.trim() : null;\n }\n return selector ? selector.split(',').map(sel => parseSelector(sel)).join(',') : null;\n};\nconst SelectorEngine = {\n find(selector, element = document.documentElement) {\n return [].concat(...Element.prototype.querySelectorAll.call(element, selector));\n },\n findOne(selector, element = document.documentElement) {\n return Element.prototype.querySelector.call(element, selector);\n },\n children(element, selector) {\n return [].concat(...element.children).filter(child => child.matches(selector));\n },\n parents(element, selector) {\n const parents = [];\n let ancestor = element.parentNode.closest(selector);\n while (ancestor) {\n parents.push(ancestor);\n ancestor = ancestor.parentNode.closest(selector);\n }\n return parents;\n },\n prev(element, selector) {\n let previous = element.previousElementSibling;\n while (previous) {\n if (previous.matches(selector)) {\n return [previous];\n }\n previous = previous.previousElementSibling;\n }\n return [];\n },\n // TODO: this is now unused; remove later along with prev()\n next(element, selector) {\n let next = element.nextElementSibling;\n while (next) {\n if (next.matches(selector)) {\n return [next];\n }\n next = next.nextElementSibling;\n }\n return [];\n },\n focusableChildren(element) {\n const focusables = ['a', 'button', 'input', 'textarea', 'select', 'details', '[tabindex]', '[contenteditable=\"true\"]'].map(selector => `${selector}:not([tabindex^=\"-\"])`).join(',');\n return this.find(focusables, element).filter(el => !isDisabled(el) && isVisible(el));\n },\n getSelectorFromElement(element) {\n const selector = getSelector(element);\n if (selector) {\n return SelectorEngine.findOne(selector) ? selector : null;\n }\n return null;\n },\n getElementFromSelector(element) {\n const selector = getSelector(element);\n return selector ? SelectorEngine.findOne(selector) : null;\n },\n getMultipleElementsFromSelector(element) {\n const selector = getSelector(element);\n return selector ? 
SelectorEngine.find(selector) : [];\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/component-functions.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst enableDismissTrigger = (component, method = 'hide') => {\n const clickEvent = `click.dismiss${component.EVENT_KEY}`;\n const name = component.NAME;\n EventHandler.on(document, clickEvent, `[data-bs-dismiss=\"${name}\"]`, function (event) {\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n const target = SelectorEngine.getElementFromSelector(this) || this.closest(`.${name}`);\n const instance = component.getOrCreateInstance(target);\n\n // Method argument is left, for Alert and only, as it doesn't implement the 'hide' method\n instance[method]();\n });\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap alert.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$f = 'alert';\nconst DATA_KEY$a = 'bs.alert';\nconst EVENT_KEY$b = `.${DATA_KEY$a}`;\nconst EVENT_CLOSE = `close${EVENT_KEY$b}`;\nconst EVENT_CLOSED = `closed${EVENT_KEY$b}`;\nconst CLASS_NAME_FADE$5 = 'fade';\nconst CLASS_NAME_SHOW$8 = 'show';\n\n/**\n * Class definition\n */\n\nclass Alert extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$f;\n }\n\n // Public\n close() {\n const closeEvent = EventHandler.trigger(this._element, EVENT_CLOSE);\n if (closeEvent.defaultPrevented) {\n return;\n }\n this._element.classList.remove(CLASS_NAME_SHOW$8);\n const isAnimated = this._element.classList.contains(CLASS_NAME_FADE$5);\n this._queueCallback(() => this._destroyElement(), this._element, isAnimated);\n }\n\n // Private\n _destroyElement() {\n this._element.remove();\n EventHandler.trigger(this._element, EVENT_CLOSED);\n this.dispose();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Alert.getOrCreateInstance(this);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nenableDismissTrigger(Alert, 'close');\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Alert);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap button.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$e = 'button';\nconst DATA_KEY$9 = 'bs.button';\nconst EVENT_KEY$a = `.${DATA_KEY$9}`;\nconst DATA_API_KEY$6 = '.data-api';\nconst CLASS_NAME_ACTIVE$3 = 'active';\nconst SELECTOR_DATA_TOGGLE$5 = '[data-bs-toggle=\"button\"]';\nconst EVENT_CLICK_DATA_API$6 = `click${EVENT_KEY$a}${DATA_API_KEY$6}`;\n\n/**\n * Class definition\n */\n\nclass Button extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$e;\n }\n\n // Public\n toggle() {\n // Toggle class and sync the `aria-pressed` attribute with the return value of the `.toggle()` method\n 
this._element.setAttribute('aria-pressed', this._element.classList.toggle(CLASS_NAME_ACTIVE$3));\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Button.getOrCreateInstance(this);\n if (config === 'toggle') {\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$6, SELECTOR_DATA_TOGGLE$5, event => {\n event.preventDefault();\n const button = event.target.closest(SELECTOR_DATA_TOGGLE$5);\n const data = Button.getOrCreateInstance(button);\n data.toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Button);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/swipe.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$d = 'swipe';\nconst EVENT_KEY$9 = '.bs.swipe';\nconst EVENT_TOUCHSTART = `touchstart${EVENT_KEY$9}`;\nconst EVENT_TOUCHMOVE = `touchmove${EVENT_KEY$9}`;\nconst EVENT_TOUCHEND = `touchend${EVENT_KEY$9}`;\nconst EVENT_POINTERDOWN = `pointerdown${EVENT_KEY$9}`;\nconst EVENT_POINTERUP = `pointerup${EVENT_KEY$9}`;\nconst POINTER_TYPE_TOUCH = 'touch';\nconst POINTER_TYPE_PEN = 'pen';\nconst CLASS_NAME_POINTER_EVENT = 'pointer-event';\nconst SWIPE_THRESHOLD = 40;\nconst Default$c = {\n endCallback: null,\n leftCallback: null,\n rightCallback: null\n};\nconst DefaultType$c = {\n endCallback: '(function|null)',\n leftCallback: '(function|null)',\n rightCallback: '(function|null)'\n};\n\n/**\n * Class definition\n */\n\nclass Swipe extends Config {\n constructor(element, config) {\n super();\n this._element = element;\n if (!element || !Swipe.isSupported()) {\n return;\n }\n this._config = this._getConfig(config);\n this._deltaX = 0;\n this._supportPointerEvents = Boolean(window.PointerEvent);\n this._initEvents();\n }\n\n // Getters\n static get Default() {\n return Default$c;\n }\n static get DefaultType() {\n return DefaultType$c;\n }\n static get NAME() {\n return NAME$d;\n }\n\n // Public\n dispose() {\n EventHandler.off(this._element, EVENT_KEY$9);\n }\n\n // Private\n _start(event) {\n if (!this._supportPointerEvents) {\n this._deltaX = event.touches[0].clientX;\n return;\n }\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX;\n }\n }\n _end(event) {\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX - this._deltaX;\n }\n this._handleSwipe();\n execute(this._config.endCallback);\n }\n _move(event) {\n this._deltaX = event.touches && event.touches.length > 1 ? 0 : event.touches[0].clientX - this._deltaX;\n }\n _handleSwipe() {\n const absDeltaX = Math.abs(this._deltaX);\n if (absDeltaX <= SWIPE_THRESHOLD) {\n return;\n }\n const direction = absDeltaX / this._deltaX;\n this._deltaX = 0;\n if (!direction) {\n return;\n }\n execute(direction > 0 ? 
this._config.rightCallback : this._config.leftCallback);\n }\n _initEvents() {\n if (this._supportPointerEvents) {\n EventHandler.on(this._element, EVENT_POINTERDOWN, event => this._start(event));\n EventHandler.on(this._element, EVENT_POINTERUP, event => this._end(event));\n this._element.classList.add(CLASS_NAME_POINTER_EVENT);\n } else {\n EventHandler.on(this._element, EVENT_TOUCHSTART, event => this._start(event));\n EventHandler.on(this._element, EVENT_TOUCHMOVE, event => this._move(event));\n EventHandler.on(this._element, EVENT_TOUCHEND, event => this._end(event));\n }\n }\n _eventIsPointerPenTouch(event) {\n return this._supportPointerEvents && (event.pointerType === POINTER_TYPE_PEN || event.pointerType === POINTER_TYPE_TOUCH);\n }\n\n // Static\n static isSupported() {\n return 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap carousel.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$c = 'carousel';\nconst DATA_KEY$8 = 'bs.carousel';\nconst EVENT_KEY$8 = `.${DATA_KEY$8}`;\nconst DATA_API_KEY$5 = '.data-api';\nconst ARROW_LEFT_KEY$1 = 'ArrowLeft';\nconst ARROW_RIGHT_KEY$1 = 'ArrowRight';\nconst TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch\n\nconst ORDER_NEXT = 'next';\nconst ORDER_PREV = 'prev';\nconst DIRECTION_LEFT = 'left';\nconst DIRECTION_RIGHT = 'right';\nconst EVENT_SLIDE = `slide${EVENT_KEY$8}`;\nconst EVENT_SLID = `slid${EVENT_KEY$8}`;\nconst EVENT_KEYDOWN$1 = `keydown${EVENT_KEY$8}`;\nconst EVENT_MOUSEENTER$1 = `mouseenter${EVENT_KEY$8}`;\nconst EVENT_MOUSELEAVE$1 = `mouseleave${EVENT_KEY$8}`;\nconst EVENT_DRAG_START = `dragstart${EVENT_KEY$8}`;\nconst EVENT_LOAD_DATA_API$3 = `load${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst EVENT_CLICK_DATA_API$5 = `click${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst CLASS_NAME_CAROUSEL = 'carousel';\nconst CLASS_NAME_ACTIVE$2 = 'active';\nconst CLASS_NAME_SLIDE = 'slide';\nconst CLASS_NAME_END = 'carousel-item-end';\nconst CLASS_NAME_START = 'carousel-item-start';\nconst CLASS_NAME_NEXT = 'carousel-item-next';\nconst CLASS_NAME_PREV = 'carousel-item-prev';\nconst SELECTOR_ACTIVE = '.active';\nconst SELECTOR_ITEM = '.carousel-item';\nconst SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM;\nconst SELECTOR_ITEM_IMG = '.carousel-item img';\nconst SELECTOR_INDICATORS = '.carousel-indicators';\nconst SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';\nconst SELECTOR_DATA_RIDE = '[data-bs-ride=\"carousel\"]';\nconst KEY_TO_DIRECTION = {\n [ARROW_LEFT_KEY$1]: DIRECTION_RIGHT,\n [ARROW_RIGHT_KEY$1]: DIRECTION_LEFT\n};\nconst Default$b = {\n interval: 5000,\n keyboard: true,\n pause: 'hover',\n ride: false,\n touch: true,\n wrap: true\n};\nconst DefaultType$b = {\n interval: '(number|boolean)',\n // TODO:v6 remove boolean support\n keyboard: 'boolean',\n pause: '(string|boolean)',\n ride: '(boolean|string)',\n touch: 'boolean',\n wrap: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Carousel extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._interval = null;\n this._activeElement = null;\n this._isSliding = false;\n this.touchTimeout = null;\n this._swipeHelper = null;\n this._indicatorsElement = SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);\n 
this._addEventListeners();\n if (this._config.ride === CLASS_NAME_CAROUSEL) {\n this.cycle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$b;\n }\n static get DefaultType() {\n return DefaultType$b;\n }\n static get NAME() {\n return NAME$c;\n }\n\n // Public\n next() {\n this._slide(ORDER_NEXT);\n }\n nextWhenVisible() {\n // FIXME TODO use `document.visibilityState`\n // Don't call next when the page isn't visible\n // or the carousel or its parent isn't visible\n if (!document.hidden && isVisible(this._element)) {\n this.next();\n }\n }\n prev() {\n this._slide(ORDER_PREV);\n }\n pause() {\n if (this._isSliding) {\n triggerTransitionEnd(this._element);\n }\n this._clearInterval();\n }\n cycle() {\n this._clearInterval();\n this._updateInterval();\n this._interval = setInterval(() => this.nextWhenVisible(), this._config.interval);\n }\n _maybeEnableCycle() {\n if (!this._config.ride) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.cycle());\n return;\n }\n this.cycle();\n }\n to(index) {\n const items = this._getItems();\n if (index > items.length - 1 || index < 0) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.to(index));\n return;\n }\n const activeIndex = this._getItemIndex(this._getActive());\n if (activeIndex === index) {\n return;\n }\n const order = index > activeIndex ? ORDER_NEXT : ORDER_PREV;\n this._slide(order, items[index]);\n }\n dispose() {\n if (this._swipeHelper) {\n this._swipeHelper.dispose();\n }\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n config.defaultInterval = config.interval;\n return config;\n }\n _addEventListeners() {\n if (this._config.keyboard) {\n EventHandler.on(this._element, EVENT_KEYDOWN$1, event => this._keydown(event));\n }\n if (this._config.pause === 'hover') {\n EventHandler.on(this._element, EVENT_MOUSEENTER$1, () => this.pause());\n EventHandler.on(this._element, EVENT_MOUSELEAVE$1, () => this._maybeEnableCycle());\n }\n if (this._config.touch && Swipe.isSupported()) {\n this._addTouchEventListeners();\n }\n }\n _addTouchEventListeners() {\n for (const img of SelectorEngine.find(SELECTOR_ITEM_IMG, this._element)) {\n EventHandler.on(img, EVENT_DRAG_START, event => event.preventDefault());\n }\n const endCallBack = () => {\n if (this._config.pause !== 'hover') {\n return;\n }\n\n // If it's a touch-enabled device, mouseenter/leave are fired as\n // part of the mouse compatibility events on first tap - the carousel\n // would stop cycling until user tapped out of it;\n // here, we listen for touchend, explicitly pause the carousel\n // (as if it's the second time we tap on it, mouseenter compat event\n // is NOT fired) and after a timeout (to allow for mouse compatibility\n // events to fire) we explicitly restart cycling\n\n this.pause();\n if (this.touchTimeout) {\n clearTimeout(this.touchTimeout);\n }\n this.touchTimeout = setTimeout(() => this._maybeEnableCycle(), TOUCHEVENT_COMPAT_WAIT + this._config.interval);\n };\n const swipeConfig = {\n leftCallback: () => this._slide(this._directionToOrder(DIRECTION_LEFT)),\n rightCallback: () => this._slide(this._directionToOrder(DIRECTION_RIGHT)),\n endCallback: endCallBack\n };\n this._swipeHelper = new Swipe(this._element, swipeConfig);\n }\n _keydown(event) {\n if (/input|textarea/i.test(event.target.tagName)) {\n return;\n }\n const direction = KEY_TO_DIRECTION[event.key];\n if (direction) {\n event.preventDefault();\n 
this._slide(this._directionToOrder(direction));\n }\n }\n _getItemIndex(element) {\n return this._getItems().indexOf(element);\n }\n _setActiveIndicatorElement(index) {\n if (!this._indicatorsElement) {\n return;\n }\n const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE, this._indicatorsElement);\n activeIndicator.classList.remove(CLASS_NAME_ACTIVE$2);\n activeIndicator.removeAttribute('aria-current');\n const newActiveIndicator = SelectorEngine.findOne(`[data-bs-slide-to=\"${index}\"]`, this._indicatorsElement);\n if (newActiveIndicator) {\n newActiveIndicator.classList.add(CLASS_NAME_ACTIVE$2);\n newActiveIndicator.setAttribute('aria-current', 'true');\n }\n }\n _updateInterval() {\n const element = this._activeElement || this._getActive();\n if (!element) {\n return;\n }\n const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);\n this._config.interval = elementInterval || this._config.defaultInterval;\n }\n _slide(order, element = null) {\n if (this._isSliding) {\n return;\n }\n const activeElement = this._getActive();\n const isNext = order === ORDER_NEXT;\n const nextElement = element || getNextActiveElement(this._getItems(), activeElement, isNext, this._config.wrap);\n if (nextElement === activeElement) {\n return;\n }\n const nextElementIndex = this._getItemIndex(nextElement);\n const triggerEvent = eventName => {\n return EventHandler.trigger(this._element, eventName, {\n relatedTarget: nextElement,\n direction: this._orderToDirection(order),\n from: this._getItemIndex(activeElement),\n to: nextElementIndex\n });\n };\n const slideEvent = triggerEvent(EVENT_SLIDE);\n if (slideEvent.defaultPrevented) {\n return;\n }\n if (!activeElement || !nextElement) {\n // Some weirdness is happening, so we bail\n // TODO: change tests that use empty divs to avoid this check\n return;\n }\n const isCycling = Boolean(this._interval);\n this.pause();\n this._isSliding = true;\n this._setActiveIndicatorElement(nextElementIndex);\n this._activeElement = nextElement;\n const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;\n const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;\n nextElement.classList.add(orderClassName);\n reflow(nextElement);\n activeElement.classList.add(directionalClassName);\n nextElement.classList.add(directionalClassName);\n const completeCallBack = () => {\n nextElement.classList.remove(directionalClassName, orderClassName);\n nextElement.classList.add(CLASS_NAME_ACTIVE$2);\n activeElement.classList.remove(CLASS_NAME_ACTIVE$2, orderClassName, directionalClassName);\n this._isSliding = false;\n triggerEvent(EVENT_SLID);\n };\n this._queueCallback(completeCallBack, activeElement, this._isAnimated());\n if (isCycling) {\n this.cycle();\n }\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_SLIDE);\n }\n _getActive() {\n return SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);\n }\n _getItems() {\n return SelectorEngine.find(SELECTOR_ITEM, this._element);\n }\n _clearInterval() {\n if (this._interval) {\n clearInterval(this._interval);\n this._interval = null;\n }\n }\n _directionToOrder(direction) {\n if (isRTL()) {\n return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;\n }\n return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;\n }\n _orderToDirection(order) {\n if (isRTL()) {\n return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;\n }\n return order === ORDER_PREV ? 
DIRECTION_RIGHT : DIRECTION_LEFT;\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Carousel.getOrCreateInstance(this, config);\n if (typeof config === 'number') {\n data.to(config);\n return;\n }\n if (typeof config === 'string') {\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$5, SELECTOR_DATA_SLIDE, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {\n return;\n }\n event.preventDefault();\n const carousel = Carousel.getOrCreateInstance(target);\n const slideIndex = this.getAttribute('data-bs-slide-to');\n if (slideIndex) {\n carousel.to(slideIndex);\n carousel._maybeEnableCycle();\n return;\n }\n if (Manipulator.getDataAttribute(this, 'slide') === 'next') {\n carousel.next();\n carousel._maybeEnableCycle();\n return;\n }\n carousel.prev();\n carousel._maybeEnableCycle();\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$3, () => {\n const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);\n for (const carousel of carousels) {\n Carousel.getOrCreateInstance(carousel);\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Carousel);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap collapse.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$b = 'collapse';\nconst DATA_KEY$7 = 'bs.collapse';\nconst EVENT_KEY$7 = `.${DATA_KEY$7}`;\nconst DATA_API_KEY$4 = '.data-api';\nconst EVENT_SHOW$6 = `show${EVENT_KEY$7}`;\nconst EVENT_SHOWN$6 = `shown${EVENT_KEY$7}`;\nconst EVENT_HIDE$6 = `hide${EVENT_KEY$7}`;\nconst EVENT_HIDDEN$6 = `hidden${EVENT_KEY$7}`;\nconst EVENT_CLICK_DATA_API$4 = `click${EVENT_KEY$7}${DATA_API_KEY$4}`;\nconst CLASS_NAME_SHOW$7 = 'show';\nconst CLASS_NAME_COLLAPSE = 'collapse';\nconst CLASS_NAME_COLLAPSING = 'collapsing';\nconst CLASS_NAME_COLLAPSED = 'collapsed';\nconst CLASS_NAME_DEEPER_CHILDREN = `:scope .${CLASS_NAME_COLLAPSE} .${CLASS_NAME_COLLAPSE}`;\nconst CLASS_NAME_HORIZONTAL = 'collapse-horizontal';\nconst WIDTH = 'width';\nconst HEIGHT = 'height';\nconst SELECTOR_ACTIVES = '.collapse.show, .collapse.collapsing';\nconst SELECTOR_DATA_TOGGLE$4 = '[data-bs-toggle=\"collapse\"]';\nconst Default$a = {\n parent: null,\n toggle: true\n};\nconst DefaultType$a = {\n parent: '(null|element)',\n toggle: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Collapse extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isTransitioning = false;\n this._triggerArray = [];\n const toggleList = SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);\n for (const elem of toggleList) {\n const selector = SelectorEngine.getSelectorFromElement(elem);\n const filterElement = SelectorEngine.find(selector).filter(foundElement => foundElement === this._element);\n if (selector !== null && filterElement.length) {\n this._triggerArray.push(elem);\n }\n }\n this._initializeChildren();\n if (!this._config.parent) {\n this._addAriaAndCollapsedClass(this._triggerArray, this._isShown());\n }\n if (this._config.toggle) {\n this.toggle();\n }\n }\n\n // Getters\n static get Default() {\n return 
Default$a;\n }\n static get DefaultType() {\n return DefaultType$a;\n }\n static get NAME() {\n return NAME$b;\n }\n\n // Public\n toggle() {\n if (this._isShown()) {\n this.hide();\n } else {\n this.show();\n }\n }\n show() {\n if (this._isTransitioning || this._isShown()) {\n return;\n }\n let activeChildren = [];\n\n // find active children\n if (this._config.parent) {\n activeChildren = this._getFirstLevelChildren(SELECTOR_ACTIVES).filter(element => element !== this._element).map(element => Collapse.getOrCreateInstance(element, {\n toggle: false\n }));\n }\n if (activeChildren.length && activeChildren[0]._isTransitioning) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_SHOW$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n for (const activeInstance of activeChildren) {\n activeInstance.hide();\n }\n const dimension = this._getDimension();\n this._element.classList.remove(CLASS_NAME_COLLAPSE);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.style[dimension] = 0;\n this._addAriaAndCollapsedClass(this._triggerArray, true);\n this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n this._element.style[dimension] = '';\n EventHandler.trigger(this._element, EVENT_SHOWN$6);\n };\n const capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);\n const scrollSize = `scroll${capitalizedDimension}`;\n this._queueCallback(complete, this._element, true);\n this._element.style[dimension] = `${this._element[scrollSize]}px`;\n }\n hide() {\n if (this._isTransitioning || !this._isShown()) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_HIDE$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n const dimension = this._getDimension();\n this._element.style[dimension] = `${this._element.getBoundingClientRect()[dimension]}px`;\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.classList.remove(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n for (const trigger of this._triggerArray) {\n const element = SelectorEngine.getElementFromSelector(trigger);\n if (element && !this._isShown(element)) {\n this._addAriaAndCollapsedClass([trigger], false);\n }\n }\n this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE);\n EventHandler.trigger(this._element, EVENT_HIDDEN$6);\n };\n this._element.style[dimension] = '';\n this._queueCallback(complete, this._element, true);\n }\n _isShown(element = this._element) {\n return element.classList.contains(CLASS_NAME_SHOW$7);\n }\n\n // Private\n _configAfterMerge(config) {\n config.toggle = Boolean(config.toggle); // Coerce string values\n config.parent = getElement(config.parent);\n return config;\n }\n _getDimension() {\n return this._element.classList.contains(CLASS_NAME_HORIZONTAL) ? 
WIDTH : HEIGHT;\n }\n _initializeChildren() {\n if (!this._config.parent) {\n return;\n }\n const children = this._getFirstLevelChildren(SELECTOR_DATA_TOGGLE$4);\n for (const element of children) {\n const selected = SelectorEngine.getElementFromSelector(element);\n if (selected) {\n this._addAriaAndCollapsedClass([element], this._isShown(selected));\n }\n }\n }\n _getFirstLevelChildren(selector) {\n const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent);\n // remove children if greater depth\n return SelectorEngine.find(selector, this._config.parent).filter(element => !children.includes(element));\n }\n _addAriaAndCollapsedClass(triggerArray, isOpen) {\n if (!triggerArray.length) {\n return;\n }\n for (const element of triggerArray) {\n element.classList.toggle(CLASS_NAME_COLLAPSED, !isOpen);\n element.setAttribute('aria-expanded', isOpen);\n }\n }\n\n // Static\n static jQueryInterface(config) {\n const _config = {};\n if (typeof config === 'string' && /show|hide/.test(config)) {\n _config.toggle = false;\n }\n return this.each(function () {\n const data = Collapse.getOrCreateInstance(this, _config);\n if (typeof config === 'string') {\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$4, SELECTOR_DATA_TOGGLE$4, function (event) {\n // preventDefault only for elements (which change the URL) not inside the collapsible element\n if (event.target.tagName === 'A' || event.delegateTarget && event.delegateTarget.tagName === 'A') {\n event.preventDefault();\n }\n for (const element of SelectorEngine.getMultipleElementsFromSelector(this)) {\n Collapse.getOrCreateInstance(element, {\n toggle: false\n }).toggle();\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Collapse);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dropdown.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$a = 'dropdown';\nconst DATA_KEY$6 = 'bs.dropdown';\nconst EVENT_KEY$6 = `.${DATA_KEY$6}`;\nconst DATA_API_KEY$3 = '.data-api';\nconst ESCAPE_KEY$2 = 'Escape';\nconst TAB_KEY$1 = 'Tab';\nconst ARROW_UP_KEY$1 = 'ArrowUp';\nconst ARROW_DOWN_KEY$1 = 'ArrowDown';\nconst RIGHT_MOUSE_BUTTON = 2; // MouseEvent.button value for the secondary button, usually the right button\n\nconst EVENT_HIDE$5 = `hide${EVENT_KEY$6}`;\nconst EVENT_HIDDEN$5 = `hidden${EVENT_KEY$6}`;\nconst EVENT_SHOW$5 = `show${EVENT_KEY$6}`;\nconst EVENT_SHOWN$5 = `shown${EVENT_KEY$6}`;\nconst EVENT_CLICK_DATA_API$3 = `click${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYDOWN_DATA_API = `keydown${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYUP_DATA_API = `keyup${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst CLASS_NAME_SHOW$6 = 'show';\nconst CLASS_NAME_DROPUP = 'dropup';\nconst CLASS_NAME_DROPEND = 'dropend';\nconst CLASS_NAME_DROPSTART = 'dropstart';\nconst CLASS_NAME_DROPUP_CENTER = 'dropup-center';\nconst CLASS_NAME_DROPDOWN_CENTER = 'dropdown-center';\nconst SELECTOR_DATA_TOGGLE$3 = '[data-bs-toggle=\"dropdown\"]:not(.disabled):not(:disabled)';\nconst SELECTOR_DATA_TOGGLE_SHOWN = `${SELECTOR_DATA_TOGGLE$3}.${CLASS_NAME_SHOW$6}`;\nconst SELECTOR_MENU = '.dropdown-menu';\nconst SELECTOR_NAVBAR = '.navbar';\nconst SELECTOR_NAVBAR_NAV = 
'.navbar-nav';\nconst SELECTOR_VISIBLE_ITEMS = '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)';\nconst PLACEMENT_TOP = isRTL() ? 'top-end' : 'top-start';\nconst PLACEMENT_TOPEND = isRTL() ? 'top-start' : 'top-end';\nconst PLACEMENT_BOTTOM = isRTL() ? 'bottom-end' : 'bottom-start';\nconst PLACEMENT_BOTTOMEND = isRTL() ? 'bottom-start' : 'bottom-end';\nconst PLACEMENT_RIGHT = isRTL() ? 'left-start' : 'right-start';\nconst PLACEMENT_LEFT = isRTL() ? 'right-start' : 'left-start';\nconst PLACEMENT_TOPCENTER = 'top';\nconst PLACEMENT_BOTTOMCENTER = 'bottom';\nconst Default$9 = {\n autoClose: true,\n boundary: 'clippingParents',\n display: 'dynamic',\n offset: [0, 2],\n popperConfig: null,\n reference: 'toggle'\n};\nconst DefaultType$9 = {\n autoClose: '(boolean|string)',\n boundary: '(string|element)',\n display: 'string',\n offset: '(array|string|function)',\n popperConfig: '(null|object|function)',\n reference: '(string|element|object)'\n};\n\n/**\n * Class definition\n */\n\nclass Dropdown extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._popper = null;\n this._parent = this._element.parentNode; // dropdown wrapper\n // TODO: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.3/forms/input-group/\n this._menu = SelectorEngine.next(this._element, SELECTOR_MENU)[0] || SelectorEngine.prev(this._element, SELECTOR_MENU)[0] || SelectorEngine.findOne(SELECTOR_MENU, this._parent);\n this._inNavbar = this._detectNavbar();\n }\n\n // Getters\n static get Default() {\n return Default$9;\n }\n static get DefaultType() {\n return DefaultType$9;\n }\n static get NAME() {\n return NAME$a;\n }\n\n // Public\n toggle() {\n return this._isShown() ? this.hide() : this.show();\n }\n show() {\n if (isDisabled(this._element) || this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$5, relatedTarget);\n if (showEvent.defaultPrevented) {\n return;\n }\n this._createPopper();\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement && !this._parent.closest(SELECTOR_NAVBAR_NAV)) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n this._element.focus();\n this._element.setAttribute('aria-expanded', true);\n this._menu.classList.add(CLASS_NAME_SHOW$6);\n this._element.classList.add(CLASS_NAME_SHOW$6);\n EventHandler.trigger(this._element, EVENT_SHOWN$5, relatedTarget);\n }\n hide() {\n if (isDisabled(this._element) || !this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n this._completeHide(relatedTarget);\n }\n dispose() {\n if (this._popper) {\n this._popper.destroy();\n }\n super.dispose();\n }\n update() {\n this._inNavbar = this._detectNavbar();\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Private\n _completeHide(relatedTarget) {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$5, relatedTarget);\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of 
[].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n if (this._popper) {\n this._popper.destroy();\n }\n this._menu.classList.remove(CLASS_NAME_SHOW$6);\n this._element.classList.remove(CLASS_NAME_SHOW$6);\n this._element.setAttribute('aria-expanded', 'false');\n Manipulator.removeDataAttribute(this._menu, 'popper');\n EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);\n }\n _getConfig(config) {\n config = super._getConfig(config);\n if (typeof config.reference === 'object' && !isElement(config.reference) && typeof config.reference.getBoundingClientRect !== 'function') {\n // Popper virtual elements require a getBoundingClientRect method\n throw new TypeError(`${NAME$a.toUpperCase()}: Option \"reference\" provided type \"object\" without a required \"getBoundingClientRect\" method.`);\n }\n return config;\n }\n _createPopper() {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s dropdowns require Popper (https://popper.js.org)');\n }\n let referenceElement = this._element;\n if (this._config.reference === 'parent') {\n referenceElement = this._parent;\n } else if (isElement(this._config.reference)) {\n referenceElement = getElement(this._config.reference);\n } else if (typeof this._config.reference === 'object') {\n referenceElement = this._config.reference;\n }\n const popperConfig = this._getPopperConfig();\n this._popper = Popper.createPopper(referenceElement, this._menu, popperConfig);\n }\n _isShown() {\n return this._menu.classList.contains(CLASS_NAME_SHOW$6);\n }\n _getPlacement() {\n const parentDropdown = this._parent;\n if (parentDropdown.classList.contains(CLASS_NAME_DROPEND)) {\n return PLACEMENT_RIGHT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPSTART)) {\n return PLACEMENT_LEFT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP_CENTER)) {\n return PLACEMENT_TOPCENTER;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPDOWN_CENTER)) {\n return PLACEMENT_BOTTOMCENTER;\n }\n\n // We need to trim the value because custom properties can also include spaces\n const isEnd = getComputedStyle(this._menu).getPropertyValue('--bs-position').trim() === 'end';\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP)) {\n return isEnd ? PLACEMENT_TOPEND : PLACEMENT_TOP;\n }\n return isEnd ? 
PLACEMENT_BOTTOMEND : PLACEMENT_BOTTOM;\n }\n _detectNavbar() {\n return this._element.closest(SELECTOR_NAVBAR) !== null;\n }\n _getOffset() {\n const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _getPopperConfig() {\n const defaultBsPopperConfig = {\n placement: this._getPlacement(),\n modifiers: [{\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }]\n };\n\n // Disable Popper if we have a static display or Dropdown is in Navbar\n if (this._inNavbar || this._config.display === 'static') {\n Manipulator.setDataAttribute(this._menu, 'popper', 'static'); // TODO: v6 remove\n defaultBsPopperConfig.modifiers = [{\n name: 'applyStyles',\n enabled: false\n }];\n }\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _selectMenuItem({\n key,\n target\n }) {\n const items = SelectorEngine.find(SELECTOR_VISIBLE_ITEMS, this._menu).filter(element => isVisible(element));\n if (!items.length) {\n return;\n }\n\n // if target isn't included in items (e.g. when expanding the dropdown)\n // allow cycling to get the last item in case key equals ARROW_UP_KEY\n getNextActiveElement(items, target, key === ARROW_DOWN_KEY$1, !items.includes(target)).focus();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Dropdown.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n static clearMenus(event) {\n if (event.button === RIGHT_MOUSE_BUTTON || event.type === 'keyup' && event.key !== TAB_KEY$1) {\n return;\n }\n const openToggles = SelectorEngine.find(SELECTOR_DATA_TOGGLE_SHOWN);\n for (const toggle of openToggles) {\n const context = Dropdown.getInstance(toggle);\n if (!context || context._config.autoClose === false) {\n continue;\n }\n const composedPath = event.composedPath();\n const isMenuTarget = composedPath.includes(context._menu);\n if (composedPath.includes(context._element) || context._config.autoClose === 'inside' && !isMenuTarget || context._config.autoClose === 'outside' && isMenuTarget) {\n continue;\n }\n\n // Tab navigation through the dropdown menu or events from contained inputs shouldn't close the menu\n if (context._menu.contains(event.target) && (event.type === 'keyup' && event.key === TAB_KEY$1 || /input|select|option|textarea|form/i.test(event.target.tagName))) {\n continue;\n }\n const relatedTarget = {\n relatedTarget: context._element\n };\n if (event.type === 'click') {\n relatedTarget.clickEvent = event;\n }\n context._completeHide(relatedTarget);\n }\n }\n static dataApiKeydownHandler(event) {\n // If not an UP | DOWN | ESCAPE key => not a dropdown command\n // If input/textarea && if key is other than ESCAPE => not a dropdown command\n\n const isInput = /input|textarea/i.test(event.target.tagName);\n const isEscapeEvent = event.key === ESCAPE_KEY$2;\n const isUpOrDownEvent = [ARROW_UP_KEY$1, ARROW_DOWN_KEY$1].includes(event.key);\n if (!isUpOrDownEvent && !isEscapeEvent) {\n return;\n }\n if (isInput && !isEscapeEvent) {\n return;\n }\n event.preventDefault();\n\n // TODO: v6 revert #37011 & change 
markup https://getbootstrap.com/docs/5.3/forms/input-group/\n const getToggleButton = this.matches(SELECTOR_DATA_TOGGLE$3) ? this : SelectorEngine.prev(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.next(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.findOne(SELECTOR_DATA_TOGGLE$3, event.delegateTarget.parentNode);\n const instance = Dropdown.getOrCreateInstance(getToggleButton);\n if (isUpOrDownEvent) {\n event.stopPropagation();\n instance.show();\n instance._selectMenuItem(event);\n return;\n }\n if (instance._isShown()) {\n // else is escape and we check if it is shown\n event.stopPropagation();\n instance.hide();\n getToggleButton.focus();\n }\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_DATA_TOGGLE$3, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_MENU, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_KEYUP_DATA_API, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, SELECTOR_DATA_TOGGLE$3, function (event) {\n event.preventDefault();\n Dropdown.getOrCreateInstance(this).toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Dropdown);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/backdrop.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$9 = 'backdrop';\nconst CLASS_NAME_FADE$4 = 'fade';\nconst CLASS_NAME_SHOW$5 = 'show';\nconst EVENT_MOUSEDOWN = `mousedown.bs.${NAME$9}`;\nconst Default$8 = {\n className: 'modal-backdrop',\n clickCallback: null,\n isAnimated: false,\n isVisible: true,\n // if false, we use the backdrop helper without adding any element to the dom\n rootElement: 'body' // give the choice to place backdrop under different elements\n};\nconst DefaultType$8 = {\n className: 'string',\n clickCallback: '(function|null)',\n isAnimated: 'boolean',\n isVisible: 'boolean',\n rootElement: '(element|string)'\n};\n\n/**\n * Class definition\n */\n\nclass Backdrop extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isAppended = false;\n this._element = null;\n }\n\n // Getters\n static get Default() {\n return Default$8;\n }\n static get DefaultType() {\n return DefaultType$8;\n }\n static get NAME() {\n return NAME$9;\n }\n\n // Public\n show(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._append();\n const element = this._getElement();\n if (this._config.isAnimated) {\n reflow(element);\n }\n element.classList.add(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n execute(callback);\n });\n }\n hide(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._getElement().classList.remove(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n this.dispose();\n execute(callback);\n });\n }\n dispose() {\n if (!this._isAppended) {\n return;\n }\n EventHandler.off(this._element, EVENT_MOUSEDOWN);\n this._element.remove();\n this._isAppended = false;\n }\n\n // Private\n _getElement() {\n if (!this._element) {\n const backdrop = document.createElement('div');\n backdrop.className = this._config.className;\n if (this._config.isAnimated) {\n backdrop.classList.add(CLASS_NAME_FADE$4);\n }\n this._element = backdrop;\n }\n 
return this._element;\n }\n _configAfterMerge(config) {\n // use getElement() with the default \"body\" to get a fresh Element on each instantiation\n config.rootElement = getElement(config.rootElement);\n return config;\n }\n _append() {\n if (this._isAppended) {\n return;\n }\n const element = this._getElement();\n this._config.rootElement.append(element);\n EventHandler.on(element, EVENT_MOUSEDOWN, () => {\n execute(this._config.clickCallback);\n });\n this._isAppended = true;\n }\n _emulateAnimation(callback) {\n executeAfterTransition(callback, this._getElement(), this._config.isAnimated);\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/focustrap.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$8 = 'focustrap';\nconst DATA_KEY$5 = 'bs.focustrap';\nconst EVENT_KEY$5 = `.${DATA_KEY$5}`;\nconst EVENT_FOCUSIN$2 = `focusin${EVENT_KEY$5}`;\nconst EVENT_KEYDOWN_TAB = `keydown.tab${EVENT_KEY$5}`;\nconst TAB_KEY = 'Tab';\nconst TAB_NAV_FORWARD = 'forward';\nconst TAB_NAV_BACKWARD = 'backward';\nconst Default$7 = {\n autofocus: true,\n trapElement: null // The element to trap focus inside of\n};\nconst DefaultType$7 = {\n autofocus: 'boolean',\n trapElement: 'element'\n};\n\n/**\n * Class definition\n */\n\nclass FocusTrap extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isActive = false;\n this._lastTabNavDirection = null;\n }\n\n // Getters\n static get Default() {\n return Default$7;\n }\n static get DefaultType() {\n return DefaultType$7;\n }\n static get NAME() {\n return NAME$8;\n }\n\n // Public\n activate() {\n if (this._isActive) {\n return;\n }\n if (this._config.autofocus) {\n this._config.trapElement.focus();\n }\n EventHandler.off(document, EVENT_KEY$5); // guard against infinite focus loop\n EventHandler.on(document, EVENT_FOCUSIN$2, event => this._handleFocusin(event));\n EventHandler.on(document, EVENT_KEYDOWN_TAB, event => this._handleKeydown(event));\n this._isActive = true;\n }\n deactivate() {\n if (!this._isActive) {\n return;\n }\n this._isActive = false;\n EventHandler.off(document, EVENT_KEY$5);\n }\n\n // Private\n _handleFocusin(event) {\n const {\n trapElement\n } = this._config;\n if (event.target === document || event.target === trapElement || trapElement.contains(event.target)) {\n return;\n }\n const elements = SelectorEngine.focusableChildren(trapElement);\n if (elements.length === 0) {\n trapElement.focus();\n } else if (this._lastTabNavDirection === TAB_NAV_BACKWARD) {\n elements[elements.length - 1].focus();\n } else {\n elements[0].focus();\n }\n }\n _handleKeydown(event) {\n if (event.key !== TAB_KEY) {\n return;\n }\n this._lastTabNavDirection = event.shiftKey ? 
TAB_NAV_BACKWARD : TAB_NAV_FORWARD;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/scrollBar.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst SELECTOR_FIXED_CONTENT = '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top';\nconst SELECTOR_STICKY_CONTENT = '.sticky-top';\nconst PROPERTY_PADDING = 'padding-right';\nconst PROPERTY_MARGIN = 'margin-right';\n\n/**\n * Class definition\n */\n\nclass ScrollBarHelper {\n constructor() {\n this._element = document.body;\n }\n\n // Public\n getWidth() {\n // https://developer.mozilla.org/en-US/docs/Web/API/Window/innerWidth#usage_notes\n const documentWidth = document.documentElement.clientWidth;\n return Math.abs(window.innerWidth - documentWidth);\n }\n hide() {\n const width = this.getWidth();\n this._disableOverFlow();\n // give padding to element to balance the hidden scrollbar width\n this._setElementAttributes(this._element, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n // trick: We adjust positive paddingRight and negative marginRight to sticky-top elements to keep showing fullwidth\n this._setElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n this._setElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN, calculatedValue => calculatedValue - width);\n }\n reset() {\n this._resetElementAttributes(this._element, 'overflow');\n this._resetElementAttributes(this._element, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN);\n }\n isOverflowing() {\n return this.getWidth() > 0;\n }\n\n // Private\n _disableOverFlow() {\n this._saveInitialAttribute(this._element, 'overflow');\n this._element.style.overflow = 'hidden';\n }\n _setElementAttributes(selector, styleProperty, callback) {\n const scrollbarWidth = this.getWidth();\n const manipulationCallBack = element => {\n if (element !== this._element && window.innerWidth > element.clientWidth + scrollbarWidth) {\n return;\n }\n this._saveInitialAttribute(element, styleProperty);\n const calculatedValue = window.getComputedStyle(element).getPropertyValue(styleProperty);\n element.style.setProperty(styleProperty, `${callback(Number.parseFloat(calculatedValue))}px`);\n };\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _saveInitialAttribute(element, styleProperty) {\n const actualValue = element.style.getPropertyValue(styleProperty);\n if (actualValue) {\n Manipulator.setDataAttribute(element, styleProperty, actualValue);\n }\n }\n _resetElementAttributes(selector, styleProperty) {\n const manipulationCallBack = element => {\n const value = Manipulator.getDataAttribute(element, styleProperty);\n // We only want to remove the property if the value is `null`; the value can also be zero\n if (value === null) {\n element.style.removeProperty(styleProperty);\n return;\n }\n Manipulator.removeDataAttribute(element, styleProperty);\n element.style.setProperty(styleProperty, value);\n };\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _applyManipulationCallback(selector, callBack) {\n if (isElement(selector)) {\n callBack(selector);\n return;\n }\n for (const sel of SelectorEngine.find(selector, this._element)) {\n callBack(sel);\n }\n }\n}\n\n/**\n * 
--------------------------------------------------------------------------\n * Bootstrap modal.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$7 = 'modal';\nconst DATA_KEY$4 = 'bs.modal';\nconst EVENT_KEY$4 = `.${DATA_KEY$4}`;\nconst DATA_API_KEY$2 = '.data-api';\nconst ESCAPE_KEY$1 = 'Escape';\nconst EVENT_HIDE$4 = `hide${EVENT_KEY$4}`;\nconst EVENT_HIDE_PREVENTED$1 = `hidePrevented${EVENT_KEY$4}`;\nconst EVENT_HIDDEN$4 = `hidden${EVENT_KEY$4}`;\nconst EVENT_SHOW$4 = `show${EVENT_KEY$4}`;\nconst EVENT_SHOWN$4 = `shown${EVENT_KEY$4}`;\nconst EVENT_RESIZE$1 = `resize${EVENT_KEY$4}`;\nconst EVENT_CLICK_DISMISS = `click.dismiss${EVENT_KEY$4}`;\nconst EVENT_MOUSEDOWN_DISMISS = `mousedown.dismiss${EVENT_KEY$4}`;\nconst EVENT_KEYDOWN_DISMISS$1 = `keydown.dismiss${EVENT_KEY$4}`;\nconst EVENT_CLICK_DATA_API$2 = `click${EVENT_KEY$4}${DATA_API_KEY$2}`;\nconst CLASS_NAME_OPEN = 'modal-open';\nconst CLASS_NAME_FADE$3 = 'fade';\nconst CLASS_NAME_SHOW$4 = 'show';\nconst CLASS_NAME_STATIC = 'modal-static';\nconst OPEN_SELECTOR$1 = '.modal.show';\nconst SELECTOR_DIALOG = '.modal-dialog';\nconst SELECTOR_MODAL_BODY = '.modal-body';\nconst SELECTOR_DATA_TOGGLE$2 = '[data-bs-toggle=\"modal\"]';\nconst Default$6 = {\n backdrop: true,\n focus: true,\n keyboard: true\n};\nconst DefaultType$6 = {\n backdrop: '(boolean|string)',\n focus: 'boolean',\n keyboard: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Modal extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._dialog = SelectorEngine.findOne(SELECTOR_DIALOG, this._element);\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._isShown = false;\n this._isTransitioning = false;\n this._scrollBar = new ScrollBarHelper();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$6;\n }\n static get DefaultType() {\n return DefaultType$6;\n }\n static get NAME() {\n return NAME$7;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? 
this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown || this._isTransitioning) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$4, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._isTransitioning = true;\n this._scrollBar.hide();\n document.body.classList.add(CLASS_NAME_OPEN);\n this._adjustDialog();\n this._backdrop.show(() => this._showElement(relatedTarget));\n }\n hide() {\n if (!this._isShown || this._isTransitioning) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$4);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._isShown = false;\n this._isTransitioning = true;\n this._focustrap.deactivate();\n this._element.classList.remove(CLASS_NAME_SHOW$4);\n this._queueCallback(() => this._hideModal(), this._element, this._isAnimated());\n }\n dispose() {\n EventHandler.off(window, EVENT_KEY$4);\n EventHandler.off(this._dialog, EVENT_KEY$4);\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n handleUpdate() {\n this._adjustDialog();\n }\n\n // Private\n _initializeBackDrop() {\n return new Backdrop({\n isVisible: Boolean(this._config.backdrop),\n // 'static' option will be translated to true, and booleans will keep their value,\n isAnimated: this._isAnimated()\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n _showElement(relatedTarget) {\n // try to append dynamic modal\n if (!document.body.contains(this._element)) {\n document.body.append(this._element);\n }\n this._element.style.display = 'block';\n this._element.removeAttribute('aria-hidden');\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.scrollTop = 0;\n const modalBody = SelectorEngine.findOne(SELECTOR_MODAL_BODY, this._dialog);\n if (modalBody) {\n modalBody.scrollTop = 0;\n }\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_SHOW$4);\n const transitionComplete = () => {\n if (this._config.focus) {\n this._focustrap.activate();\n }\n this._isTransitioning = false;\n EventHandler.trigger(this._element, EVENT_SHOWN$4, {\n relatedTarget\n });\n };\n this._queueCallback(transitionComplete, this._dialog, this._isAnimated());\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS$1, event => {\n if (event.key !== ESCAPE_KEY$1) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n this._triggerBackdropTransition();\n });\n EventHandler.on(window, EVENT_RESIZE$1, () => {\n if (this._isShown && !this._isTransitioning) {\n this._adjustDialog();\n }\n });\n EventHandler.on(this._element, EVENT_MOUSEDOWN_DISMISS, event => {\n // a bad trick to segregate clicks that may start inside dialog but end outside, and avoid listen to scrollbar clicks\n EventHandler.one(this._element, EVENT_CLICK_DISMISS, event2 => {\n if (this._element !== event.target || this._element !== event2.target) {\n return;\n }\n if (this._config.backdrop === 'static') {\n this._triggerBackdropTransition();\n return;\n }\n if (this._config.backdrop) {\n this.hide();\n }\n });\n });\n }\n _hideModal() {\n this._element.style.display = 'none';\n this._element.setAttribute('aria-hidden', true);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n this._isTransitioning = false;\n this._backdrop.hide(() => {\n document.body.classList.remove(CLASS_NAME_OPEN);\n 
this._resetAdjustments();\n this._scrollBar.reset();\n EventHandler.trigger(this._element, EVENT_HIDDEN$4);\n });\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_FADE$3);\n }\n _triggerBackdropTransition() {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED$1);\n if (hideEvent.defaultPrevented) {\n return;\n }\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const initialOverflowY = this._element.style.overflowY;\n // return if the following background transition hasn't yet completed\n if (initialOverflowY === 'hidden' || this._element.classList.contains(CLASS_NAME_STATIC)) {\n return;\n }\n if (!isModalOverflowing) {\n this._element.style.overflowY = 'hidden';\n }\n this._element.classList.add(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.classList.remove(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.style.overflowY = initialOverflowY;\n }, this._dialog);\n }, this._dialog);\n this._element.focus();\n }\n\n /**\n * The following methods are used to handle overflowing modals\n */\n\n _adjustDialog() {\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const scrollbarWidth = this._scrollBar.getWidth();\n const isBodyOverflowing = scrollbarWidth > 0;\n if (isBodyOverflowing && !isModalOverflowing) {\n const property = isRTL() ? 'paddingLeft' : 'paddingRight';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n if (!isBodyOverflowing && isModalOverflowing) {\n const property = isRTL() ? 'paddingRight' : 'paddingLeft';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n }\n _resetAdjustments() {\n this._element.style.paddingLeft = '';\n this._element.style.paddingRight = '';\n }\n\n // Static\n static jQueryInterface(config, relatedTarget) {\n return this.each(function () {\n const data = Modal.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](relatedTarget);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$2, SELECTOR_DATA_TOGGLE$2, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n EventHandler.one(target, EVENT_SHOW$4, showEvent => {\n if (showEvent.defaultPrevented) {\n // only register focus restorer if modal will actually get shown\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$4, () => {\n if (isVisible(this)) {\n this.focus();\n }\n });\n });\n\n // avoid conflict when clicking modal toggler while another one is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR$1);\n if (alreadyOpen) {\n Modal.getInstance(alreadyOpen).hide();\n }\n const data = Modal.getOrCreateInstance(target);\n data.toggle(this);\n});\nenableDismissTrigger(Modal);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Modal);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap offcanvas.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$6 = 'offcanvas';\nconst DATA_KEY$3 = 'bs.offcanvas';\nconst EVENT_KEY$3 = `.${DATA_KEY$3}`;\nconst DATA_API_KEY$1 = '.data-api';\nconst 
EVENT_LOAD_DATA_API$2 = `load${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst ESCAPE_KEY = 'Escape';\nconst CLASS_NAME_SHOW$3 = 'show';\nconst CLASS_NAME_SHOWING$1 = 'showing';\nconst CLASS_NAME_HIDING = 'hiding';\nconst CLASS_NAME_BACKDROP = 'offcanvas-backdrop';\nconst OPEN_SELECTOR = '.offcanvas.show';\nconst EVENT_SHOW$3 = `show${EVENT_KEY$3}`;\nconst EVENT_SHOWN$3 = `shown${EVENT_KEY$3}`;\nconst EVENT_HIDE$3 = `hide${EVENT_KEY$3}`;\nconst EVENT_HIDE_PREVENTED = `hidePrevented${EVENT_KEY$3}`;\nconst EVENT_HIDDEN$3 = `hidden${EVENT_KEY$3}`;\nconst EVENT_RESIZE = `resize${EVENT_KEY$3}`;\nconst EVENT_CLICK_DATA_API$1 = `click${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst EVENT_KEYDOWN_DISMISS = `keydown.dismiss${EVENT_KEY$3}`;\nconst SELECTOR_DATA_TOGGLE$1 = '[data-bs-toggle=\"offcanvas\"]';\nconst Default$5 = {\n backdrop: true,\n keyboard: true,\n scroll: false\n};\nconst DefaultType$5 = {\n backdrop: '(boolean|string)',\n keyboard: 'boolean',\n scroll: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Offcanvas extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isShown = false;\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$5;\n }\n static get DefaultType() {\n return DefaultType$5;\n }\n static get NAME() {\n return NAME$6;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$3, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._backdrop.show();\n if (!this._config.scroll) {\n new ScrollBarHelper().hide();\n }\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.classList.add(CLASS_NAME_SHOWING$1);\n const completeCallBack = () => {\n if (!this._config.scroll || this._config.backdrop) {\n this._focustrap.activate();\n }\n this._element.classList.add(CLASS_NAME_SHOW$3);\n this._element.classList.remove(CLASS_NAME_SHOWING$1);\n EventHandler.trigger(this._element, EVENT_SHOWN$3, {\n relatedTarget\n });\n };\n this._queueCallback(completeCallBack, this._element, true);\n }\n hide() {\n if (!this._isShown) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$3);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._focustrap.deactivate();\n this._element.blur();\n this._isShown = false;\n this._element.classList.add(CLASS_NAME_HIDING);\n this._backdrop.hide();\n const completeCallback = () => {\n this._element.classList.remove(CLASS_NAME_SHOW$3, CLASS_NAME_HIDING);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n if (!this._config.scroll) {\n new ScrollBarHelper().reset();\n }\n EventHandler.trigger(this._element, EVENT_HIDDEN$3);\n };\n this._queueCallback(completeCallback, this._element, true);\n }\n dispose() {\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n\n // Private\n _initializeBackDrop() {\n const clickCallback = () => {\n if (this._config.backdrop === 'static') {\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n return;\n }\n this.hide();\n };\n\n // 'static' option will be translated to true, and booleans will keep their value\n const isVisible = Boolean(this._config.backdrop);\n return 
new Backdrop({\n className: CLASS_NAME_BACKDROP,\n isVisible,\n isAnimated: true,\n rootElement: this._element.parentNode,\n clickCallback: isVisible ? clickCallback : null\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS, event => {\n if (event.key !== ESCAPE_KEY) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n });\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Offcanvas.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$1, SELECTOR_DATA_TOGGLE$1, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$3, () => {\n // focus on trigger when it is closed\n if (isVisible(this)) {\n this.focus();\n }\n });\n\n // avoid conflict when clicking a toggler of an offcanvas, while another is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR);\n if (alreadyOpen && alreadyOpen !== target) {\n Offcanvas.getInstance(alreadyOpen).hide();\n }\n const data = Offcanvas.getOrCreateInstance(target);\n data.toggle(this);\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$2, () => {\n for (const selector of SelectorEngine.find(OPEN_SELECTOR)) {\n Offcanvas.getOrCreateInstance(selector).show();\n }\n});\nEventHandler.on(window, EVENT_RESIZE, () => {\n for (const element of SelectorEngine.find('[aria-modal][class*=show][class*=offcanvas-]')) {\n if (getComputedStyle(element).position !== 'fixed') {\n Offcanvas.getOrCreateInstance(element).hide();\n }\n }\n});\nenableDismissTrigger(Offcanvas);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Offcanvas);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/sanitizer.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n// js-docs-start allow-list\nconst ARIA_ATTRIBUTE_PATTERN = /^aria-[\\w-]*$/i;\nconst DefaultAllowlist = {\n // Global attributes allowed on any supplied element below.\n '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],\n a: ['target', 'href', 'title', 'rel'],\n area: [],\n b: [],\n br: [],\n col: [],\n code: [],\n dd: [],\n div: [],\n dl: [],\n dt: [],\n em: [],\n hr: [],\n h1: [],\n h2: [],\n h3: [],\n h4: [],\n h5: [],\n h6: [],\n i: [],\n img: ['src', 'srcset', 'alt', 'title', 'width', 'height'],\n li: [],\n ol: [],\n p: [],\n pre: [],\n s: [],\n small: [],\n span: [],\n sub: [],\n sup: [],\n strong: [],\n u: [],\n ul: []\n};\n// js-docs-end allow-list\n\nconst uriAttributes = new Set(['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href']);\n\n/**\n * A pattern that recognizes URLs that are safe wrt. 
XSS in URL navigation\n * contexts.\n *\n * Shout-out to Angular https://github.com/angular/angular/blob/15.2.8/packages/core/src/sanitization/url_sanitizer.ts#L38\n */\n// eslint-disable-next-line unicorn/better-regex\nconst SAFE_URL_PATTERN = /^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i;\nconst allowedAttribute = (attribute, allowedAttributeList) => {\n const attributeName = attribute.nodeName.toLowerCase();\n if (allowedAttributeList.includes(attributeName)) {\n if (uriAttributes.has(attributeName)) {\n return Boolean(SAFE_URL_PATTERN.test(attribute.nodeValue));\n }\n return true;\n }\n\n // Check if a regular expression validates the attribute.\n return allowedAttributeList.filter(attributeRegex => attributeRegex instanceof RegExp).some(regex => regex.test(attributeName));\n};\nfunction sanitizeHtml(unsafeHtml, allowList, sanitizeFunction) {\n if (!unsafeHtml.length) {\n return unsafeHtml;\n }\n if (sanitizeFunction && typeof sanitizeFunction === 'function') {\n return sanitizeFunction(unsafeHtml);\n }\n const domParser = new window.DOMParser();\n const createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');\n const elements = [].concat(...createdDocument.body.querySelectorAll('*'));\n for (const element of elements) {\n const elementName = element.nodeName.toLowerCase();\n if (!Object.keys(allowList).includes(elementName)) {\n element.remove();\n continue;\n }\n const attributeList = [].concat(...element.attributes);\n const allowedAttributes = [].concat(allowList['*'] || [], allowList[elementName] || []);\n for (const attribute of attributeList) {\n if (!allowedAttribute(attribute, allowedAttributes)) {\n element.removeAttribute(attribute.nodeName);\n }\n }\n }\n return createdDocument.body.innerHTML;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/template-factory.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$5 = 'TemplateFactory';\nconst Default$4 = {\n allowList: DefaultAllowlist,\n content: {},\n // { selector : text , selector2 : text2 , }\n extraClass: '',\n html: false,\n sanitize: true,\n sanitizeFn: null,\n template: '
'\n};\nconst DefaultType$4 = {\n allowList: 'object',\n content: 'object',\n extraClass: '(string|function)',\n html: 'boolean',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n template: 'string'\n};\nconst DefaultContentType = {\n entry: '(string|element|function|null)',\n selector: '(string|element)'\n};\n\n/**\n * Class definition\n */\n\nclass TemplateFactory extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n }\n\n // Getters\n static get Default() {\n return Default$4;\n }\n static get DefaultType() {\n return DefaultType$4;\n }\n static get NAME() {\n return NAME$5;\n }\n\n // Public\n getContent() {\n return Object.values(this._config.content).map(config => this._resolvePossibleFunction(config)).filter(Boolean);\n }\n hasContent() {\n return this.getContent().length > 0;\n }\n changeContent(content) {\n this._checkContent(content);\n this._config.content = {\n ...this._config.content,\n ...content\n };\n return this;\n }\n toHtml() {\n const templateWrapper = document.createElement('div');\n templateWrapper.innerHTML = this._maybeSanitize(this._config.template);\n for (const [selector, text] of Object.entries(this._config.content)) {\n this._setContent(templateWrapper, text, selector);\n }\n const template = templateWrapper.children[0];\n const extraClass = this._resolvePossibleFunction(this._config.extraClass);\n if (extraClass) {\n template.classList.add(...extraClass.split(' '));\n }\n return template;\n }\n\n // Private\n _typeCheckConfig(config) {\n super._typeCheckConfig(config);\n this._checkContent(config.content);\n }\n _checkContent(arg) {\n for (const [selector, content] of Object.entries(arg)) {\n super._typeCheckConfig({\n selector,\n entry: content\n }, DefaultContentType);\n }\n }\n _setContent(template, content, selector) {\n const templateElement = SelectorEngine.findOne(selector, template);\n if (!templateElement) {\n return;\n }\n content = this._resolvePossibleFunction(content);\n if (!content) {\n templateElement.remove();\n return;\n }\n if (isElement(content)) {\n this._putElementInTemplate(getElement(content), templateElement);\n return;\n }\n if (this._config.html) {\n templateElement.innerHTML = this._maybeSanitize(content);\n return;\n }\n templateElement.textContent = content;\n }\n _maybeSanitize(arg) {\n return this._config.sanitize ? 
sanitizeHtml(arg, this._config.allowList, this._config.sanitizeFn) : arg;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this]);\n }\n _putElementInTemplate(element, templateElement) {\n if (this._config.html) {\n templateElement.innerHTML = '';\n templateElement.append(element);\n return;\n }\n templateElement.textContent = element.textContent;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap tooltip.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$4 = 'tooltip';\nconst DISALLOWED_ATTRIBUTES = new Set(['sanitize', 'allowList', 'sanitizeFn']);\nconst CLASS_NAME_FADE$2 = 'fade';\nconst CLASS_NAME_MODAL = 'modal';\nconst CLASS_NAME_SHOW$2 = 'show';\nconst SELECTOR_TOOLTIP_INNER = '.tooltip-inner';\nconst SELECTOR_MODAL = `.${CLASS_NAME_MODAL}`;\nconst EVENT_MODAL_HIDE = 'hide.bs.modal';\nconst TRIGGER_HOVER = 'hover';\nconst TRIGGER_FOCUS = 'focus';\nconst TRIGGER_CLICK = 'click';\nconst TRIGGER_MANUAL = 'manual';\nconst EVENT_HIDE$2 = 'hide';\nconst EVENT_HIDDEN$2 = 'hidden';\nconst EVENT_SHOW$2 = 'show';\nconst EVENT_SHOWN$2 = 'shown';\nconst EVENT_INSERTED = 'inserted';\nconst EVENT_CLICK$1 = 'click';\nconst EVENT_FOCUSIN$1 = 'focusin';\nconst EVENT_FOCUSOUT$1 = 'focusout';\nconst EVENT_MOUSEENTER = 'mouseenter';\nconst EVENT_MOUSELEAVE = 'mouseleave';\nconst AttachmentMap = {\n AUTO: 'auto',\n TOP: 'top',\n RIGHT: isRTL() ? 'left' : 'right',\n BOTTOM: 'bottom',\n LEFT: isRTL() ? 'right' : 'left'\n};\nconst Default$3 = {\n allowList: DefaultAllowlist,\n animation: true,\n boundary: 'clippingParents',\n container: false,\n customClass: '',\n delay: 0,\n fallbackPlacements: ['top', 'right', 'bottom', 'left'],\n html: false,\n offset: [0, 6],\n placement: 'top',\n popperConfig: null,\n sanitize: true,\n sanitizeFn: null,\n selector: false,\n template: '
<div class="tooltip" role="tooltip">' + '<div class="tooltip-arrow"></div>' + '<div class="tooltip-inner"></div>' + '</div>
',\n title: '',\n trigger: 'hover focus'\n};\nconst DefaultType$3 = {\n allowList: 'object',\n animation: 'boolean',\n boundary: '(string|element)',\n container: '(string|element|boolean)',\n customClass: '(string|function)',\n delay: '(number|object)',\n fallbackPlacements: 'array',\n html: 'boolean',\n offset: '(array|string|function)',\n placement: '(string|function)',\n popperConfig: '(null|object|function)',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n selector: '(string|boolean)',\n template: 'string',\n title: '(string|element|function)',\n trigger: 'string'\n};\n\n/**\n * Class definition\n */\n\nclass Tooltip extends BaseComponent {\n constructor(element, config) {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s tooltips require Popper (https://popper.js.org)');\n }\n super(element, config);\n\n // Private\n this._isEnabled = true;\n this._timeout = 0;\n this._isHovered = null;\n this._activeTrigger = {};\n this._popper = null;\n this._templateFactory = null;\n this._newContent = null;\n\n // Protected\n this.tip = null;\n this._setListeners();\n if (!this._config.selector) {\n this._fixTitle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$3;\n }\n static get DefaultType() {\n return DefaultType$3;\n }\n static get NAME() {\n return NAME$4;\n }\n\n // Public\n enable() {\n this._isEnabled = true;\n }\n disable() {\n this._isEnabled = false;\n }\n toggleEnabled() {\n this._isEnabled = !this._isEnabled;\n }\n toggle() {\n if (!this._isEnabled) {\n return;\n }\n this._activeTrigger.click = !this._activeTrigger.click;\n if (this._isShown()) {\n this._leave();\n return;\n }\n this._enter();\n }\n dispose() {\n clearTimeout(this._timeout);\n EventHandler.off(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n if (this._element.getAttribute('data-bs-original-title')) {\n this._element.setAttribute('title', this._element.getAttribute('data-bs-original-title'));\n }\n this._disposePopper();\n super.dispose();\n }\n show() {\n if (this._element.style.display === 'none') {\n throw new Error('Please use show on visible elements');\n }\n if (!(this._isWithContent() && this._isEnabled)) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOW$2));\n const shadowRoot = findShadowRoot(this._element);\n const isInTheDom = (shadowRoot || this._element.ownerDocument.documentElement).contains(this._element);\n if (showEvent.defaultPrevented || !isInTheDom) {\n return;\n }\n\n // TODO: v6 remove this or make it optional\n this._disposePopper();\n const tip = this._getTipElement();\n this._element.setAttribute('aria-describedby', tip.getAttribute('id'));\n const {\n container\n } = this._config;\n if (!this._element.ownerDocument.documentElement.contains(this.tip)) {\n container.append(tip);\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_INSERTED));\n }\n this._popper = this._createPopper(tip);\n tip.classList.add(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n const complete = () => {\n EventHandler.trigger(this._element, 
this.constructor.eventName(EVENT_SHOWN$2));\n if (this._isHovered === false) {\n this._leave();\n }\n this._isHovered = false;\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n hide() {\n if (!this._isShown()) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDE$2));\n if (hideEvent.defaultPrevented) {\n return;\n }\n const tip = this._getTipElement();\n tip.classList.remove(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n this._activeTrigger[TRIGGER_CLICK] = false;\n this._activeTrigger[TRIGGER_FOCUS] = false;\n this._activeTrigger[TRIGGER_HOVER] = false;\n this._isHovered = null; // it is a trick to support manual triggering\n\n const complete = () => {\n if (this._isWithActiveTrigger()) {\n return;\n }\n if (!this._isHovered) {\n this._disposePopper();\n }\n this._element.removeAttribute('aria-describedby');\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDDEN$2));\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n update() {\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Protected\n _isWithContent() {\n return Boolean(this._getTitle());\n }\n _getTipElement() {\n if (!this.tip) {\n this.tip = this._createTipElement(this._newContent || this._getContentForTemplate());\n }\n return this.tip;\n }\n _createTipElement(content) {\n const tip = this._getTemplateFactory(content).toHtml();\n\n // TODO: remove this check in v6\n if (!tip) {\n return null;\n }\n tip.classList.remove(CLASS_NAME_FADE$2, CLASS_NAME_SHOW$2);\n // TODO: v6 the following can be achieved with CSS only\n tip.classList.add(`bs-${this.constructor.NAME}-auto`);\n const tipId = getUID(this.constructor.NAME).toString();\n tip.setAttribute('id', tipId);\n if (this._isAnimated()) {\n tip.classList.add(CLASS_NAME_FADE$2);\n }\n return tip;\n }\n setContent(content) {\n this._newContent = content;\n if (this._isShown()) {\n this._disposePopper();\n this.show();\n }\n }\n _getTemplateFactory(content) {\n if (this._templateFactory) {\n this._templateFactory.changeContent(content);\n } else {\n this._templateFactory = new TemplateFactory({\n ...this._config,\n // the `content` var has to be after `this._config`\n // to override config.content in case of popover\n content,\n extraClass: this._resolvePossibleFunction(this._config.customClass)\n });\n }\n return this._templateFactory;\n }\n _getContentForTemplate() {\n return {\n [SELECTOR_TOOLTIP_INNER]: this._getTitle()\n };\n }\n _getTitle() {\n return this._resolvePossibleFunction(this._config.title) || this._element.getAttribute('data-bs-original-title');\n }\n\n // Private\n _initializeOnDelegatedTarget(event) {\n return this.constructor.getOrCreateInstance(event.delegateTarget, this._getDelegateConfig());\n }\n _isAnimated() {\n return this._config.animation || this.tip && this.tip.classList.contains(CLASS_NAME_FADE$2);\n }\n _isShown() {\n return this.tip && this.tip.classList.contains(CLASS_NAME_SHOW$2);\n }\n _createPopper(tip) {\n const placement = execute(this._config.placement, [this, tip, this._element]);\n const attachment = AttachmentMap[placement.toUpperCase()];\n return Popper.createPopper(this._element, tip, this._getPopperConfig(attachment));\n }\n _getOffset() {\n 
const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this._element]);\n }\n _getPopperConfig(attachment) {\n const defaultBsPopperConfig = {\n placement: attachment,\n modifiers: [{\n name: 'flip',\n options: {\n fallbackPlacements: this._config.fallbackPlacements\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }, {\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'arrow',\n options: {\n element: `.${this.constructor.NAME}-arrow`\n }\n }, {\n name: 'preSetPlacement',\n enabled: true,\n phase: 'beforeMain',\n fn: data => {\n // Pre-set Popper's placement attribute in order to read the arrow sizes properly.\n // Otherwise, Popper mixes up the width and height dimensions since the initial arrow style is for top placement\n this._getTipElement().setAttribute('data-popper-placement', data.state.placement);\n }\n }]\n };\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _setListeners() {\n const triggers = this._config.trigger.split(' ');\n for (const trigger of triggers) {\n if (trigger === 'click') {\n EventHandler.on(this._element, this.constructor.eventName(EVENT_CLICK$1), this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context.toggle();\n });\n } else if (trigger !== TRIGGER_MANUAL) {\n const eventIn = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSEENTER) : this.constructor.eventName(EVENT_FOCUSIN$1);\n const eventOut = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSELEAVE) : this.constructor.eventName(EVENT_FOCUSOUT$1);\n EventHandler.on(this._element, eventIn, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusin' ? TRIGGER_FOCUS : TRIGGER_HOVER] = true;\n context._enter();\n });\n EventHandler.on(this._element, eventOut, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusout' ? TRIGGER_FOCUS : TRIGGER_HOVER] = context._element.contains(event.relatedTarget);\n context._leave();\n });\n }\n }\n this._hideModalHandler = () => {\n if (this._element) {\n this.hide();\n }\n };\n EventHandler.on(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n }\n _fixTitle() {\n const title = this._element.getAttribute('title');\n if (!title) {\n return;\n }\n if (!this._element.getAttribute('aria-label') && !this._element.textContent.trim()) {\n this._element.setAttribute('aria-label', title);\n }\n this._element.setAttribute('data-bs-original-title', title); // DO NOT USE IT. 
Is only for backwards compatibility\n this._element.removeAttribute('title');\n }\n _enter() {\n if (this._isShown() || this._isHovered) {\n this._isHovered = true;\n return;\n }\n this._isHovered = true;\n this._setTimeout(() => {\n if (this._isHovered) {\n this.show();\n }\n }, this._config.delay.show);\n }\n _leave() {\n if (this._isWithActiveTrigger()) {\n return;\n }\n this._isHovered = false;\n this._setTimeout(() => {\n if (!this._isHovered) {\n this.hide();\n }\n }, this._config.delay.hide);\n }\n _setTimeout(handler, timeout) {\n clearTimeout(this._timeout);\n this._timeout = setTimeout(handler, timeout);\n }\n _isWithActiveTrigger() {\n return Object.values(this._activeTrigger).includes(true);\n }\n _getConfig(config) {\n const dataAttributes = Manipulator.getDataAttributes(this._element);\n for (const dataAttribute of Object.keys(dataAttributes)) {\n if (DISALLOWED_ATTRIBUTES.has(dataAttribute)) {\n delete dataAttributes[dataAttribute];\n }\n }\n config = {\n ...dataAttributes,\n ...(typeof config === 'object' && config ? config : {})\n };\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n config.container = config.container === false ? document.body : getElement(config.container);\n if (typeof config.delay === 'number') {\n config.delay = {\n show: config.delay,\n hide: config.delay\n };\n }\n if (typeof config.title === 'number') {\n config.title = config.title.toString();\n }\n if (typeof config.content === 'number') {\n config.content = config.content.toString();\n }\n return config;\n }\n _getDelegateConfig() {\n const config = {};\n for (const [key, value] of Object.entries(this._config)) {\n if (this.constructor.Default[key] !== value) {\n config[key] = value;\n }\n }\n config.selector = false;\n config.trigger = 'manual';\n\n // In the future can be replaced with:\n // const keysWithDifferentValues = Object.entries(this._config).filter(entry => this.constructor.Default[entry[0]] !== this._config[entry[0]])\n // `Object.fromEntries(keysWithDifferentValues)`\n return config;\n }\n _disposePopper() {\n if (this._popper) {\n this._popper.destroy();\n this._popper = null;\n }\n if (this.tip) {\n this.tip.remove();\n this.tip = null;\n }\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Tooltip.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Tooltip);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap popover.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$3 = 'popover';\nconst SELECTOR_TITLE = '.popover-header';\nconst SELECTOR_CONTENT = '.popover-body';\nconst Default$2 = {\n ...Tooltip.Default,\n content: '',\n offset: [0, 8],\n placement: 'right',\n template: '
<div class="popover" role="tooltip">' + '<div class="popover-arrow"></div>' + '<h3 class="popover-header"></h3>' + '<div class="popover-body"></div>' + '</div>
',\n trigger: 'click'\n};\nconst DefaultType$2 = {\n ...Tooltip.DefaultType,\n content: '(null|string|element|function)'\n};\n\n/**\n * Class definition\n */\n\nclass Popover extends Tooltip {\n // Getters\n static get Default() {\n return Default$2;\n }\n static get DefaultType() {\n return DefaultType$2;\n }\n static get NAME() {\n return NAME$3;\n }\n\n // Overrides\n _isWithContent() {\n return this._getTitle() || this._getContent();\n }\n\n // Private\n _getContentForTemplate() {\n return {\n [SELECTOR_TITLE]: this._getTitle(),\n [SELECTOR_CONTENT]: this._getContent()\n };\n }\n _getContent() {\n return this._resolvePossibleFunction(this._config.content);\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Popover.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Popover);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap scrollspy.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$2 = 'scrollspy';\nconst DATA_KEY$2 = 'bs.scrollspy';\nconst EVENT_KEY$2 = `.${DATA_KEY$2}`;\nconst DATA_API_KEY = '.data-api';\nconst EVENT_ACTIVATE = `activate${EVENT_KEY$2}`;\nconst EVENT_CLICK = `click${EVENT_KEY$2}`;\nconst EVENT_LOAD_DATA_API$1 = `load${EVENT_KEY$2}${DATA_API_KEY}`;\nconst CLASS_NAME_DROPDOWN_ITEM = 'dropdown-item';\nconst CLASS_NAME_ACTIVE$1 = 'active';\nconst SELECTOR_DATA_SPY = '[data-bs-spy=\"scroll\"]';\nconst SELECTOR_TARGET_LINKS = '[href]';\nconst SELECTOR_NAV_LIST_GROUP = '.nav, .list-group';\nconst SELECTOR_NAV_LINKS = '.nav-link';\nconst SELECTOR_NAV_ITEMS = '.nav-item';\nconst SELECTOR_LIST_ITEMS = '.list-group-item';\nconst SELECTOR_LINK_ITEMS = `${SELECTOR_NAV_LINKS}, ${SELECTOR_NAV_ITEMS} > ${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}`;\nconst SELECTOR_DROPDOWN = '.dropdown';\nconst SELECTOR_DROPDOWN_TOGGLE$1 = '.dropdown-toggle';\nconst Default$1 = {\n offset: null,\n // TODO: v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: '0px 0px -25%',\n smoothScroll: false,\n target: null,\n threshold: [0.1, 0.5, 1]\n};\nconst DefaultType$1 = {\n offset: '(number|null)',\n // TODO v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: 'string',\n smoothScroll: 'boolean',\n target: 'element',\n threshold: 'array'\n};\n\n/**\n * Class definition\n */\n\nclass ScrollSpy extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n\n // this._element is the observablesContainer and config.target the menu links wrapper\n this._targetLinks = new Map();\n this._observableSections = new Map();\n this._rootElement = getComputedStyle(this._element).overflowY === 'visible' ? 
null : this._element;\n this._activeTarget = null;\n this._observer = null;\n this._previousScrollData = {\n visibleEntryTop: 0,\n parentScrollTop: 0\n };\n this.refresh(); // initialize\n }\n\n // Getters\n static get Default() {\n return Default$1;\n }\n static get DefaultType() {\n return DefaultType$1;\n }\n static get NAME() {\n return NAME$2;\n }\n\n // Public\n refresh() {\n this._initializeTargetsAndObservables();\n this._maybeEnableSmoothScroll();\n if (this._observer) {\n this._observer.disconnect();\n } else {\n this._observer = this._getNewObserver();\n }\n for (const section of this._observableSections.values()) {\n this._observer.observe(section);\n }\n }\n dispose() {\n this._observer.disconnect();\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n // TODO: on v6 target should be given explicitly & remove the {target: 'ss-target'} case\n config.target = getElement(config.target) || document.body;\n\n // TODO: v6 Only for backwards compatibility reasons. Use rootMargin only\n config.rootMargin = config.offset ? `${config.offset}px 0px -30%` : config.rootMargin;\n if (typeof config.threshold === 'string') {\n config.threshold = config.threshold.split(',').map(value => Number.parseFloat(value));\n }\n return config;\n }\n _maybeEnableSmoothScroll() {\n if (!this._config.smoothScroll) {\n return;\n }\n\n // unregister any previous listeners\n EventHandler.off(this._config.target, EVENT_CLICK);\n EventHandler.on(this._config.target, EVENT_CLICK, SELECTOR_TARGET_LINKS, event => {\n const observableSection = this._observableSections.get(event.target.hash);\n if (observableSection) {\n event.preventDefault();\n const root = this._rootElement || window;\n const height = observableSection.offsetTop - this._element.offsetTop;\n if (root.scrollTo) {\n root.scrollTo({\n top: height,\n behavior: 'smooth'\n });\n return;\n }\n\n // Chrome 60 doesn't support `scrollTo`\n root.scrollTop = height;\n }\n });\n }\n _getNewObserver() {\n const options = {\n root: this._rootElement,\n threshold: this._config.threshold,\n rootMargin: this._config.rootMargin\n };\n return new IntersectionObserver(entries => this._observerCallback(entries), options);\n }\n\n // The logic of selection\n _observerCallback(entries) {\n const targetElement = entry => this._targetLinks.get(`#${entry.target.id}`);\n const activate = entry => {\n this._previousScrollData.visibleEntryTop = entry.target.offsetTop;\n this._process(targetElement(entry));\n };\n const parentScrollTop = (this._rootElement || document.documentElement).scrollTop;\n const userScrollsDown = parentScrollTop >= this._previousScrollData.parentScrollTop;\n this._previousScrollData.parentScrollTop = parentScrollTop;\n for (const entry of entries) {\n if (!entry.isIntersecting) {\n this._activeTarget = null;\n this._clearActiveClass(targetElement(entry));\n continue;\n }\n const entryIsLowerThanPrevious = entry.target.offsetTop >= this._previousScrollData.visibleEntryTop;\n // if we are scrolling down, pick the bigger offsetTop\n if (userScrollsDown && entryIsLowerThanPrevious) {\n activate(entry);\n // if parent isn't scrolled, let's keep the first visible item, breaking the iteration\n if (!parentScrollTop) {\n return;\n }\n continue;\n }\n\n // if we are scrolling up, pick the smallest offsetTop\n if (!userScrollsDown && !entryIsLowerThanPrevious) {\n activate(entry);\n }\n }\n }\n _initializeTargetsAndObservables() {\n this._targetLinks = new Map();\n this._observableSections = new Map();\n const targetLinks = 
SelectorEngine.find(SELECTOR_TARGET_LINKS, this._config.target);\n for (const anchor of targetLinks) {\n // ensure that the anchor has an id and is not disabled\n if (!anchor.hash || isDisabled(anchor)) {\n continue;\n }\n const observableSection = SelectorEngine.findOne(decodeURI(anchor.hash), this._element);\n\n // ensure that the observableSection exists & is visible\n if (isVisible(observableSection)) {\n this._targetLinks.set(decodeURI(anchor.hash), anchor);\n this._observableSections.set(anchor.hash, observableSection);\n }\n }\n }\n _process(target) {\n if (this._activeTarget === target) {\n return;\n }\n this._clearActiveClass(this._config.target);\n this._activeTarget = target;\n target.classList.add(CLASS_NAME_ACTIVE$1);\n this._activateParents(target);\n EventHandler.trigger(this._element, EVENT_ACTIVATE, {\n relatedTarget: target\n });\n }\n _activateParents(target) {\n // Activate dropdown parents\n if (target.classList.contains(CLASS_NAME_DROPDOWN_ITEM)) {\n SelectorEngine.findOne(SELECTOR_DROPDOWN_TOGGLE$1, target.closest(SELECTOR_DROPDOWN)).classList.add(CLASS_NAME_ACTIVE$1);\n return;\n }\n for (const listGroup of SelectorEngine.parents(target, SELECTOR_NAV_LIST_GROUP)) {\n // Set triggered links parents as active\n // With both