Skip to content

Commit

Permalink
Add a few more papers
Browse files Browse the repository at this point in the history
  • Loading branch information
claudius-krause committed Jul 18, 2023
1 parent ca4510a commit 7056b51
Show file tree
Hide file tree
Showing 5 changed files with 60 additions and 4 deletions.
42 changes: 42 additions & 0 deletions HEPML.bib
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,17 @@ @article{Raine:2023fko
year = "2023"
}

% Jun. 26, 2023
% NOTE(review): second author's family name is "Al Kadhim" (Arabic surname prefix);
% was entered as "Kadhim, A. Al", which misfiles him under "Kadhim" with given name "A. Al".
@article{Kronheim:2023jrl,
author = "Kronheim, B. and Al Kadhim, A. and Kuchera, M. P. and Prosper, H. B. and Ramanujan, R.",
title = "{Implicit Quantile Networks For Emulation in Jet Physics}",
eprint = "2306.15053",
archivePrefix = "arXiv",
primaryClass = "physics.comp-ph",
month = "6",
year = "2023"
}

% Jun. 23, 2023
@article{Dubinski:2023fsy,
author = "Dubi\'nski, Jan and Deja, Kamil and Wenzel, Sandro and Rokita, Przemys\l{}aw and Trzci\'nski, Tomasz",
Expand Down Expand Up @@ -273,6 +284,16 @@ @article{Esmail:2023axd
year = "2023"
}

@article{Kach:2023rqw,
    author        = {K\"ach, Benno and Melzer-Pellmann, Isabell},
    title         = {{Attention to Mean-Fields for Particle Cloud Generation}},
    eprint        = {2305.15254},
    archivePrefix = {arXiv},
    primaryClass  = {hep-ex},
    month         = {5},
    year          = {2023}
}

%May 23, 2023
@article{Cremer:2023gne,
author = {Cremer, Lucas and Erdmann, Johannes and Harnik, Roni and Sp\"ah, Jan Lukas and Stamou, Emmanuel},
Expand Down Expand Up @@ -1635,6 +1656,27 @@ @article{Khan:2022vot
year = "2022"
}

%Nov. 24, 2022
@article{Kach:2022qnf,
    author        = {K\"ach, Benno and Kr\"ucker, Dirk and Melzer-Pellmann, Isabell and Scham, Moritz and Schnake, Simon and Verney-Provatas, Alexi},
    title         = {{JetFlow: Generating Jets with Conditioned and Mass Constrained Normalising Flows}},
    eprint        = {2211.13630},
    archivePrefix = {arXiv},
    primaryClass  = {hep-ex},
    month         = {11},
    year          = {2022}
}

@article{Kach:2022uzq,
    author        = {K\"ach, Benno and Kr\"ucker, Dirk and Melzer-Pellmann, Isabell},
    title         = {{Point Cloud Generation using Transformer Encoders and Normalising Flows}},
    eprint        = {2211.13623},
    archivePrefix = {arXiv},
    primaryClass  = {hep-ex},
    month         = {11},
    year          = {2022}
}

%Nov. 23, 2022
@article{Albandea:2022fky,
author = "Albandea, David and Del Debbio, Luigi and Hern\'andez, Pilar and Kenway, Richard and Rossney, Joe Marsh and Ramos Martinez, Alberto",
Expand Down
8 changes: 4 additions & 4 deletions HEPML.tex
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
\\\textit{Recursive neural networks are natural tools for processing data in a tree structure.}
\item \textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,1808887,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm}
\\\textit{A graph is a collection of nodes and edges. Graph neural networks are natural tools for processing data in a graph structure.}
\item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Onyisi:2022hdh,Athanasakos:2023fhq}
\item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Kach:2022uzq,Onyisi:2022hdh,Athanasakos:2023fhq,Kach:2023rqw}
\\\textit{A point cloud is a (potentially variable-size) set of points in space. Sets are distinguished from sequences in that there is no particular order (i.e. permutation invariance). Sets can also be viewed as graphs without edges and so graph methods that can parse variable-length inputs may also be appropriate for set learning, although there are other methods as well.}
\item \textbf{Physics-inspired basis}~\cite{Datta:2019,Datta:2017rhs,Datta:2017lxt,Komiske:2017aww,Butter:2017cot,Grojean:2020ech,Larkoski:2023nye}
\\\textit{This is a catch-all category for learning using other representations that use some sort of manual or automated physics-preprocessing.}
Expand Down Expand Up @@ -116,7 +116,7 @@
\\\textit{Quantum computers are based on unitary operations applied to quantum states. These states live in a vast Hilbert space which may have a usefully large information capacity for machine learning.}
\item \textbf{Feature ranking}~\cite{Faucett:2020vbu,Grojean:2020ech,Das:2022cjl}
\\\textit{It is often useful to take a set of input features and rank them based on their usefulness.}
\item \textbf{Attention}~\cite{goto2021development,Finke:2023veq,Qiu:2023ihi,Biassoni:2023lih}
\item \textbf{Attention}~\cite{goto2021development,Finke:2023veq,Qiu:2023ihi,Biassoni:2023lih,Kach:2023rqw}
\\\textit{This is an ML tool for helping the network to focus on particularly useful features.}
\item \textbf{Regularization}~\cite{Araz:2021wqm,Sforza:2013hua}
\\\textit{This is a term referring to any learning strategy that improves the robustness of a classifier to statistical fluctuations in the data and in the model initialization.}
Expand Down Expand Up @@ -169,7 +169,7 @@
\\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.}
\item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int,Anzalone:2023ugq}
\\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the detector to a randomly chosen element of the latent space.}
\item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko}
\item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Kach:2022qnf,Kach:2022uzq,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko}
\\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.}
\item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok,Acosta:2023zik}
\\\textit{These approaches learn the gradient of the density instead of the density directly.}
Expand All @@ -183,7 +183,7 @@
\\\textit{Monte Carlo event generators integrate over a phase space that needs to be generated efficiently and this can be aided by machine learning methods.}
\item \textbf{Gaussian processes}~\cite{Frate:2017mai,Bertone:2016mdy,1804325,Cisbani:2019xta}
\\\textit{These are non-parametric tools for modeling the `time'-dependence of a random variable. The `time' need not be actual time - for instance, one can use Gaussian processes to model the energy dependence of some probability density.}
\item \textbf{Other/hybrid}~\cite{Cresswell:2022tof,DiBello:2022rss,Li:2022jon,Butter:2023fov}
\item \textbf{Other/hybrid}~\cite{Cresswell:2022tof,DiBello:2022rss,Li:2022jon,Butter:2023fov,Kronheim:2023jrl}
\\\textit{Architectures that combine different network elements or otherwise do not fit into the other categories.}
\end{itemize}
\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Roche:2023int,Golling:2023juz,Sengupta:2023xqy,Mikuni:2023tok}
Expand Down
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -195,8 +195,10 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A
* [Particle Convolution for High Energy Physics](https://arxiv.org/abs/2107.02908)
* [Deep Sets based Neural Networks for Impact Parameter Flavour Tagging in ATLAS](https://cds.cern.ch/record/2718948)
* [Particle Transformer for Jet Tagging](https://arxiv.org/abs/2202.03772)
* [Point Cloud Generation using Transformer Encoders and Normalising Flows](https://arxiv.org/abs/2211.13623)
* [Comparing Point Cloud Strategies for Collider Event Classification](https://arxiv.org/abs/2212.10659)
* [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979)
* [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254)

#### Physics-inspired basis

Expand Down Expand Up @@ -653,6 +655,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A
* [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364)
* [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208)
* [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744)
* [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254)

#### Regularization

Expand Down Expand Up @@ -1081,6 +1084,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A
* [Learning trivializing flows](https://arxiv.org/abs/2211.12806) [[DOI](https://doi.org/10.22323/1.430.0001)]
* [CaloFlow for CaloChallenge Dataset 1](https://arxiv.org/abs/2210.14245)
* [CaloMan: Fast generation of calorimeter showers with density estimation on learned manifolds](https://arxiv.org/abs/2211.15380)
* [JetFlow: Generating Jets with Conditioned and Mass Constrained Normalising Flows](https://arxiv.org/abs/2211.13630)
* [Point Cloud Generation using Transformer Encoders and Normalising Flows](https://arxiv.org/abs/2211.13623)
* [TopicFlow: Disentangling quark and gluon jets with normalizing flows](https://arxiv.org/abs/2211.16053)
* [An unfolding method based on conditional Invertible Neural Networks (cINN) using iterative training](https://arxiv.org/abs/2212.08674)
* [MadNIS -- Neural Multi-Channel Importance Sampling](https://arxiv.org/abs/2212.06172)
Expand Down Expand Up @@ -1158,6 +1163,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A
* [Conditional Generative Modelling of Reconstructed Particles at Collider Experiments](https://arxiv.org/abs/2211.06406)
* [Ad-hoc Pulse Shape Simulation using Cyclic Positional U-Net](https://arxiv.org/abs/2212.04950)
* [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475)
* [Implicit Quantile Networks For Emulation in Jet Physics](https://arxiv.org/abs/2306.15053)

## Anomaly detection.

Expand Down
6 changes: 6 additions & 0 deletions docs/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -246,8 +246,10 @@ const expandElements = shouldExpand => {
* [Particle Convolution for High Energy Physics](https://arxiv.org/abs/2107.02908)
* [Deep Sets based Neural Networks for Impact Parameter Flavour Tagging in ATLAS](https://cds.cern.ch/record/2718948)
* [Particle Transformer for Jet Tagging](https://arxiv.org/abs/2202.03772)
* [Point Cloud Generation using Transformer Encoders and Normalising Flows](https://arxiv.org/abs/2211.13623)
* [Comparing Point Cloud Strategies for Collider Event Classification](https://arxiv.org/abs/2212.10659)
* [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979)
* [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254)

#### Physics-inspired basis

Expand Down Expand Up @@ -714,6 +716,7 @@ const expandElements = shouldExpand => {
* [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364)
* [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208)
* [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744)
* [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254)

#### Regularization

Expand Down Expand Up @@ -1218,6 +1221,8 @@ const expandElements = shouldExpand => {
* [Learning trivializing flows](https://arxiv.org/abs/2211.12806) [[DOI](https://doi.org/10.22323/1.430.0001)]
* [CaloFlow for CaloChallenge Dataset 1](https://arxiv.org/abs/2210.14245)
* [CaloMan: Fast generation of calorimeter showers with density estimation on learned manifolds](https://arxiv.org/abs/2211.15380)
* [JetFlow: Generating Jets with Conditioned and Mass Constrained Normalising Flows](https://arxiv.org/abs/2211.13630)
* [Point Cloud Generation using Transformer Encoders and Normalising Flows](https://arxiv.org/abs/2211.13623)
* [TopicFlow: Disentangling quark and gluon jets with normalizing flows](https://arxiv.org/abs/2211.16053)
* [An unfolding method based on conditional Invertible Neural Networks (cINN) using iterative training](https://arxiv.org/abs/2212.08674)
* [MadNIS -- Neural Multi-Channel Importance Sampling](https://arxiv.org/abs/2212.06172)
Expand Down Expand Up @@ -1330,6 +1335,7 @@ const expandElements = shouldExpand => {
* [Conditional Generative Modelling of Reconstructed Particles at Collider Experiments](https://arxiv.org/abs/2211.06406)
* [Ad-hoc Pulse Shape Simulation using Cyclic Positional U-Net](https://arxiv.org/abs/2212.04950)
* [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475)
* [Implicit Quantile Networks For Emulation in Jet Physics](https://arxiv.org/abs/2306.15053)

## Anomaly detection.

Expand Down
2 changes: 2 additions & 0 deletions docs/recent.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ This is an automatically compiled list of papers which have been added to the li
* [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405)

## June 2023
* [Implicit Quantile Networks For Emulation in Jet Physics](https://arxiv.org/abs/2306.15053)
* [Machine Learning methods for simulating particle response in the Zero Degree Calorimeter at the ALICE experiment, CERN](https://arxiv.org/abs/2306.13606)
* [Triggering Dark Showers with Conditional Dual Auto-Encoders](https://arxiv.org/abs/2306.12955)
* [Hierarchical Neural Simulation-Based Inference Over Event Ensembles](https://arxiv.org/abs/2306.12584)
Expand All @@ -39,6 +40,7 @@ This is an automatically compiled list of papers which have been added to the li
* [Predicting the Exclusive Diffractive Electron-Ion Cross Section at small $x$ with Machine Learning in Sar$t$re](https://arxiv.org/abs/2305.15880)
* [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams](https://arxiv.org/abs/2305.15106)
* [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781)
* [Attention to Mean-Fields for Particle Cloud Generation](https://arxiv.org/abs/2305.15254)
* [Leveraging on-shell interference to search for FCNCs of the top quark and the Z boson](https://arxiv.org/abs/2305.12172)
* [Precision studies for the partonic kinematics calculation through Machine Learning](https://arxiv.org/abs/2305.11369)
* [Search for periodic signals in the dielectron and diphoton invariant mass spectra using 139 fb$^{-1}$ of $pp$ collisions at $\sqrt{s} = 13$ TeV](https://arxiv.org/abs/2305.10894)
Expand Down

0 comments on commit 7056b51

Please sign in to comment.