From 99cc9102fec9dbbe0354d2e10bd4717b98440587 Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Thu, 13 Jul 2023 10:43:47 +0200 Subject: [PATCH] Updating hep-ph up to 2023-07-13 (#169) * Adding hep-ph papers from April * Fixing the bib * Remove duplicate entry in lists * Updating for hep-ph up to 20230713 * Update recent --------- Co-authored-by: claudius-krause <40409502+claudius-krause@users.noreply.github.com> --- HEPML.bib | 32 ++++++++++++++++++++++++++++++++ HEPML.tex | 10 +++++----- README.md | 5 +++++ docs/index.md | 5 +++++ docs/recent.md | 3 +++ 5 files changed, 50 insertions(+), 5 deletions(-) diff --git a/HEPML.bib b/HEPML.bib index 4bd2c88..b4ab64c 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -34,6 +34,17 @@ @article{Algren:2023spv } % Jul. 10, 2023 +@article{Alghamdi:2023emm, + author = "Alghamdi, T. and others", + title = "{Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction}", + eprint = "2307.04450", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "JLAB-THY-23-3881", + month = "7", + year = "2023" +} + @article{Acosta:2023zik, author = "Acosta, Fernando Torales and Mikuni, Vinicius and Nachman, Benjamin and Arratia, Miguel and Barish, Kenneth and Karki, Bishnu and Milton, Ryan and Karande, Piyush and Angerami, Aaron", title = "{Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation}", @@ -44,6 +55,16 @@ @article{Acosta:2023zik year = "2023" } +@article{He:2023cfc, + author = "He, Minxuan and Wang, Daohan", + title = "{Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer}", + eprint = "2307.04723", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "7", + year = "2023" +} + % Jul. 5, 2023 @article{Raine:2023fko, author = "Raine, John Andrew and Leigh, Matthew and Zoch, Knut and Golling, Tobias", @@ -132,6 +153,17 @@ @article{Grossi:2023fqq year = "2023" } +% Jun. 6, 2023 +@article{Mikuni:2023tok, + author = "Mikuni, Vinicius and Nachman, Benjamin", + title = "{High-dimensional and Permutation Invariant Anomaly Detection}", + eprint = "2306.03933", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023" +} + % Jun. 
5, 2023 @article{Riberdy:2023awf, author = "Riberdy, Michael Joseph and Dutrieux, Herv\'e and Mezrag, C\'edric and Sznajder, Pawe\l{}", diff --git a/HEPML.tex b/HEPML.tex index be74ff8..5051266 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -76,9 +76,9 @@ \\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.} \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk} \\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) present unique challenges and opportunities compared with $W/Z$ tagging.} - \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq} + \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq,He:2023cfc} \\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. This classification task has been a benchmark for many new machine learning models.} - \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq} + \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq,He:2023cfc} \\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).} \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm} \\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.} @@ -165,13 +165,13 @@ \item \textbf{Generative models / density estimation} \\\textit{The goal of generative modeling is to learn (explicitly or implicitly) a probability density $p(x)$ for the features $x\in\mathbb{R}^n$. 
This task is usually unsupervised (no labels).} \begin{itemize} - \item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume,Dubinski:2023fsy} + \item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume,Dubinski:2023fsy,Alghamdi:2023emm} \\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.} \item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int,Anzalone:2023ugq} \\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. 
When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the decoder to a randomly chosen element of the latent space.} \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko} \\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.} - \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Acosta:2023zik} + \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok,Acosta:2023zik} \\\textit{These approaches learn the gradient of the density instead of the density directly.} \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov,Raine:2023fko} \\\textit{These approaches learn the density or perform generative modeling using transformer-based networks.} @@ -186,7 +186,7 @@ \item \textbf{Other/hybrid}~\cite{Cresswell:2022tof,DiBello:2022rss,Li:2022jon,Butter:2023fov} \\\textit{Architectures that combine different network elements or otherwise do not fit into the other categories.} \end{itemize} -\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Roche:2023int,Golling:2023juz,Sengupta:2023xqy} +\item \textbf{Anomaly 
detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Roche:2023int,Golling:2023juz,Sengupta:2023xqy,Mikuni:2023tok} \\\textit{The goal of anomaly detection is to identify abnormal events. The abnormal events could be from physics beyond the Standard Model or from faults in a detector. While nearly all searches for new physics are technically anomaly detection, this category is for methods that are model-independent (broadly defined). Anomalies in high energy physics tend to manifest as over-densities in phase space (often called `population anomalies'), in contrast to off-manifold anomalies where you can flag individual examples as anomalous. } \item \textbf{Simulation-based (`likelihood-free') Inference} \\\textit{Likelihood-based inference is the case where $p(x|\theta)$ is known and $\theta$ can be determined by maximizing the probability of the data. 
In high energy physics, $p(x|\theta)$ is often not known analytically, but it is often possible to sample from the density implicitly using simulations.} diff --git a/README.md b/README.md index cdd9884..152cf0b 100644 --- a/README.md +++ b/README.md @@ -258,6 +258,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Systematic Quark/Gluon Identification with Ratios of Likelihoods](https://arxiv.org/abs/2207.12411) * [Jet substructure observables for jet quenching in Quark Gluon Plasma: a Machine Learning driven analysis](https://arxiv.org/abs/2304.07196) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### top quark tagging @@ -279,6 +280,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [BIP: Boost Invariant Polynomials for Efficient Jet Tagging](https://arxiv.org/abs/2207.08272) * [Boosted top tagging and its interpretation using Shapley values](https://arxiv.org/abs/2212.11606) * [Automatic detection of boosted Higgs and top quark jets in event image](https://arxiv.org/abs/2302.13460) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### strange jets @@ -1017,6 +1019,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) * [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) * [Machine Learning methods for simulating particle response in the Zero Degree Calorimeter at the ALICE experiment, CERN](https://arxiv.org/abs/2306.13606) +* [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) ### Autoencoders @@ -1095,6 +1098,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) * [Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation](https://arxiv.org/abs/2307.04780) ### Transformer Models @@ -1244,6 +1248,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Nanosecond anomaly detection with decision trees for high energy physics and real-time application to exotic Higgs decays](https://arxiv.org/abs/2304.03836) * [The Mass-ive Issue: Anomaly Detection in Jet Physics](https://arxiv.org/abs/2303.14134) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ## Simulation-based (`likelihood-free') Inference ### Parameter estimation diff --git a/docs/index.md b/docs/index.md index fd36aad..04013f8 100644 --- a/docs/index.md +++ b/docs/index.md @@ -314,6 +314,7 @@ const expandElements = shouldExpand => { * [Systematic Quark/Gluon Identification with Ratios of 
Likelihoods](https://arxiv.org/abs/2207.12411) * [Jet substructure observables for jet quenching in Quark Gluon Plasma: a Machine Learning driven analysis](https://arxiv.org/abs/2304.07196) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) + * [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### top quark tagging @@ -335,6 +336,7 @@ const expandElements = shouldExpand => { * [BIP: Boost Invariant Polynomials for Efficient Jet Tagging](https://arxiv.org/abs/2207.08272) * [Boosted top tagging and its interpretation using Shapley values](https://arxiv.org/abs/2212.11606) * [Automatic detection of boosted Higgs and top quark jets in event image](https://arxiv.org/abs/2302.13460) + * [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### strange jets @@ -1144,6 +1146,7 @@ const expandElements = shouldExpand => { * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) * [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) * [Machine Learning methods for simulating particle response in the Zero Degree Calorimeter at the ALICE experiment, CERN](https://arxiv.org/abs/2306.13606) + * [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) ??? example "Autoencoders" @@ -1237,6 +1240,7 @@ const expandElements = shouldExpand => { * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) + * [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) * [Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation](https://arxiv.org/abs/2307.04780) @@ -1419,6 +1423,7 @@ const expandElements = shouldExpand => { * [Nanosecond anomaly detection with decision trees for high energy physics and real-time application to exotic Higgs decays](https://arxiv.org/abs/2304.03836) * [The Mass-ive Issue: Anomaly Detection in Jet Physics](https://arxiv.org/abs/2303.14134) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) + * [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ## Simulation-based (`likelihood-free') Inference diff --git a/docs/recent.md b/docs/recent.md index 5ad687d..64e7ee0 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -13,7 +13,9 @@ This is an automatically compiled list of papers which have been added to the li * [Fast Neural Network Inference on FPGAs for Triggering on Long-Lived Particles at Colliders](https://arxiv.org/abs/2307.05152) * [Precise Image Generation on Current Noisy Quantum Computing Devices](https://arxiv.org/abs/2307.05253) * [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) +* [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) * [Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation](https://arxiv.org/abs/2307.04780) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention 
Transformer](https://arxiv.org/abs/2307.04723) * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ## June 2023 @@ -24,6 +26,7 @@ This is an automatically compiled list of papers which have been added to the li * [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) * [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) * [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) ## May 2023