From 8b2d3ecb0fd1646afc628eb4a6acd86b6078f204 Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Wed, 12 Jul 2023 13:59:01 +0200 Subject: [PATCH 1/5] Adding hep-ph papers from April --- HEPML.bib | 208 +++++++++++++++++++++++++++++++++++++++++++++++++ HEPML.tex | 28 +++---- README.md | 19 +++++ docs/index.md | 19 +++++ docs/recent.md | 25 ++++++ 5 files changed, 285 insertions(+), 14 deletions(-) diff --git a/HEPML.bib b/HEPML.bib index 7e2a1bf..db0c409 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -1,5 +1,27 @@ # HEPML Papers +% Jul. 11, 2023 +@article{Algren:2023spv, + author = "Algren, Malte and Raine, John Andrew and Golling, Tobias", + title = "{Decorrelation using Optimal Transport}", + eprint = "2307.05187", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "7", + year = "2023" +} + +% Jul. 5, 2023 +@article{Raine:2023fko, + author = "Raine, John Andrew and Leigh, Matthew and Zoch, Knut and Golling, Tobias", + title = "{$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows}", + eprint = "2307.02405", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "7", + year = "2023" +} + % Jun. 19, 2023 @article{Karmakar:2023mhy, author = "Karmakar, Annesha and Pal, Anikesh and Kumar, G. Anil and Bhavika and Anand, V. and Tyagi, Mohit", @@ -21,6 +43,39 @@ @article{Knipfer:2023zrv year = "2023" } +% Jun. 14, 2023 +@article{Grossi:2023fqq, + author = "Grossi, Michele and Incudini, Massimiliano and Pellen, Mathieu and Pelliccioli, Giovanni", + title = "{Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks}", + eprint = "2306.07726", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023" +} + +% Jun. 
5, 2023 +@article{Riberdy:2023awf, + author = "Riberdy, Michael Joseph and Dutrieux, Herv\'e and Mezrag, C\'edric and Sznajder, Pawe\l{}", + title = "{Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness}", + eprint = "2306.01647", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023" +} + +% Jun. 2, 2023 +@article{Han:2023djl, + author = "Han, Tao and Lewis, Ian M. and Liu, Hongkai and Liu, Zhen and Wang, Xing", + title = "{A Guide to Diagnosing Colored Resonances at Hadron Colliders}", + eprint = "2306.00079", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + % Jun. 1, 2023 @article{Herbst:2023lug, author = "Herbst, Ryan and Coffee, Ryan and Fronk, Nathan and Kim, Kukhee and Kim, Kuktae and Ruckman, Larry and Russell, J. J.", @@ -65,6 +120,72 @@ @article{Napolitano:2023jhg year = "2023" } +@article{Chan:2023ume, + author = "Chan, Jay and Ju, Xiangyang and Kania, Adam and Nachman, Benjamin and Sangli, Vishnu and Siodmok, Andrzej", + title = "{Fitting a Deep Generative Hadronization Model}", + eprint = "2305.17169", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + +%May 26, 2023 +@article{Singh:2023yvj, + author = "Singh, Jaswant and Toll, Tobias", + title = "{Predicting the Exclusive Diffractive Electron-Ion Cross Section at small $x$ with Machine Learning in Sar$t$re}", + eprint = "2305.15880", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + +%May 25, 2023 +@article{Subba:2023rpm, + author = "Subba, Amir and Singh, Ritesh K.", + title = "{Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams}", + eprint = "2305.15106", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + +%May 24, 2023 +@article{Esmail:2023axd, + author = "Esmail, W. 
and Hammad, A. and Moretti, S.", + title = "{Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning}", + eprint = "2305.13781", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + +%May 23, 2023 +@article{Cremer:2023gne, + author = {Cremer, Lucas and Erdmann, Johannes and Harnik, Roni and Sp\"ah, Jan Lukas and Stamou, Emmanuel}, + title = "{Leveraging on-shell interference to search for FCNCs of the top quark and the Z boson}", + eprint = "2305.12172", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "DO-TH 23/04, FERMILAB-PUB-23-261-T", + month = "5", + year = "2023" +} + +%May 22, 2023 +@inproceedings{Renteria-Estrada:2023buo, + author = "Renteria-Estrada, David F. and Hernandez-Pinto, Roger J. and Sborlini, G. F. R. and Zurita, Pia", + title = "{Precision studies for the partonic kinematics calculation through Machine Learning}", + eprint = "2305.11369", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + %May 18, 2023 @article{ATLAS:2023hbp, author = "{ATLAS Collaboration}", @@ -172,6 +293,18 @@ @article{Athanasakos:2023fhq year = "2023" } +% May 10, 2023 +@article{Aguilar-Saavedra:2023pde, + author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. 
F.", + title = "{Gradient Boosting MUST taggers for highly-boosted jets}", + eprint = "2305.04957", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "IFT-UAM/CSIC-23-29", + month = "5", + year = "2023" +} + % May 9, 2023 @article{Buhmann:2023bwk, author = {Buhmann, Erik and Diefenbacher, Sascha and Eren, Engin and Gaede, Frank and Kasieczka, Gregor and Korol, Anatolii and Korcari, William and Kr\"uger, Katja and McKeown, Peter}, @@ -184,6 +317,16 @@ @article{Buhmann:2023bwk year = "2023" } +@article{Bardhan:2023mia, + author = "Bardhan, Debjyoti and Kats, Yevgeny and Wunch, Noam", + title = "{Searching for dark jets with displaced vertices using weakly supervised machine learning}", + eprint = "2305.04372", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + % May 8, 2023 @article{Sengupta:2023xqy, author = "Sengupta, Debajyoti and Klein, Samuel and Raine, John Andrew and Golling, Tobias", @@ -195,6 +338,16 @@ @article{Sengupta:2023xqy year = "2023" } +@article{Dennis:2023kfe, + author = "Dennis, Mitchell T. and Sakstein, Jeremy", + title = "{Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited}", + eprint = "2305.03113", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} %%% not sure this is HEP + %May 7, 2023 @article{Aguilar-Saavedra:2023pde, author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. 
F.", @@ -217,6 +370,18 @@ @article{Bardhan:2023mia year = "2023" } +%May 5, 2023 + +@article{Soleymaninia:2023dds, + author = "Soleymaninia, Maryam and Hashamipour, Hadi and Khanpour, Hamzeh and Shoeib, Samira and Mohamaditabar, Alireza", + title = "{Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis}", + eprint = "2305.02664", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + %May 4, 2023 @article{Dennis:2023kfe, author = "Dennis, Mitchell T. and Sakstein, Jeremy", @@ -249,6 +414,28 @@ @article{Shi:2023xfz year = "2023" } +@article{Basak:2023wzq, + author = "Basak, Dipankar and Dey, Kalyan", + title = "{Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning}", + eprint = "2305.00493", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "4", + year = "2023" +} + + +@article{Guo:2023nfu, + author = "Guo, Yu-Chen and Feng, Fan and Di, An and Lu, Shi-Qi and Yang, Ji-Chong", + title = "{MLAnalysis: An open-source program for high energy physics analyses}", + eprint = "2305.00964", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "5", + year = "2023" +} + + %May 1, 2023 @article{Guo:2023nfu, author = "Guo, Yu-Chen and Feng, Fan and Di, An and Lu, Shi-Qi and Yang, Ji-Chong", @@ -295,6 +482,27 @@ @article{Algren:2023qnb year = "2023" } +@article{Fernando:2023obn, + author = "Fernando, I. P. 
and Keller, D.", + title = "{A Modern Global Extraction of the Sivers Function}", + eprint = "2304.14328", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "4", + year = "2023" +} + +@article{Nishimura:2023wdu, + author = "Nishimura, Satsuki and Miyao, Coh and Otsuka, Hajime", + title = "{Exploring the flavor structure of quarks and leptons with reinforcement learning}", + eprint = "2304.14176", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "KYUSHU-HET-257", + month = "4", + year = "2023" +} + %Apr. 27, 2023 @article{Nishimura:2023wdu, diff --git a/HEPML.tex b/HEPML.tex index 7b4c4ba..8854552 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -55,7 +55,7 @@ \item \textbf{Representations} \\\textit{There is no unique way to represent high energy physics data. It is often natural to encode $x$ as an image or another one of the structures listed below.} \begin{itemize} - \item \textbf{Jet images}~\cite{Pumplin:1991kc,Cogan:2014oua,Almeida:2015jua,deOliveira:2015xxd,ATL-PHYS-PUB-2017-017,Lin:2018cin,Komiske:2018oaa,Barnard:2016qma,Komiske:2016rsd,Kasieczka:2017nvn,Macaluso:2018tck,li2020reconstructing,li2020attention,Lee:2019cad,collado2021learning,Du:2020pmp,Filipek:2021qbe,Choi:2023slq} + \item \textbf{Jet images}~\cite{Pumplin:1991kc,Cogan:2014oua,Almeida:2015jua,deOliveira:2015xxd,ATL-PHYS-PUB-2017-017,Lin:2018cin,Komiske:2018oaa,Barnard:2016qma,Komiske:2016rsd,Kasieczka:2017nvn,Macaluso:2018tck,li2020reconstructing,li2020attention,Lee:2019cad,collado2021learning,Du:2020pmp,Filipek:2021qbe,Choi:2023slq,Han:2023djl} \\\textit{Jets are collimated sprays of particles. They have a complex radiation pattern and such, have been a prototypical example for many machine learning studies. 
See the next item for a specific description about images.} \item \textbf{Event images}~\cite{Nguyen:2018ugw,ATL-PHYS-PUB-2019-028,Lin:2018cin,Andrews:2018nwy,Chung:2020ysf,Du:2019civ,Andrews:2021ejw,Pol:2021iqw,Bae:2022dnw} \\\textit{A grayscale image is a regular grid with a scalar value at each grid point. `Color' images have a fixed-length vector at each grid point. Many detectors are analogous to digital cameras and thus images are a natural representation. In other cases, images can be created by discretizing. Convolutional neural networks are natural tools for processing image data. One downside of the image representation is that high energy physics data tend to be sparse, unlike natural images.} @@ -72,7 +72,7 @@ \end{itemize} \item \textbf{Targets} \begin{itemize} - \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq} + \item \textbf{$W/Z$ tagging}~\cite{deOliveira:2015xxd,Barnard:2016qma,Louppe:2017ipp,Sirunyan:2020lcu,Chen:2019uar,1811770,Dreyer:2020brq,Kim:2021gtv,Subba:2022czw,Aguilar-Saavedra:2023pde,Athanasakos:2023fhq,Grossi:2023fqq} \\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.} \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk} \\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) has unique challenged and opportunities compared with $W/Z$ tagging.} @@ -80,13 +80,13 @@ \\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. 
This classification task has been a benchmark for many new machine learning models.} \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq} \\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).} - \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh} + \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm} \\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.} \item \textbf{$b$-tagging}~\cite{Sirunyan:2017ezt,Guest:2016iqz,bielkov2020identifying,Bols:2020bkb,ATL-PHYS-PUB-2017-003,ATL-PHYS-PUB-2020-014,Liao:2022ufk} \\\textit{Due to their long (but not too long) lifetime, the $B$-hadron lifetime is macroscopic and $b$-jet tagging has been one of the earliest adapters of modern machine learning tools.} \item \textbf{Flavor physics}~\cite{1811097,Bahtiyar:2022une,Zhang:2023czx,Nishimura:2023wdu} \\\textit{This category is for studies related to exclusive particle decays, especially with bottom and charm hadrons.} - \item \textbf{BSM particles and 
models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia} + \item \textbf{BSM particles and models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia,Aguilar-Saavedra:2023pde,Cremer:2023gne,Esmail:2023axd} \\\textit{There are many proposals to train classifiers to enhance the presence of particular new physics models.} \item \textbf{Particle 
identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum,Kushawaha:2023dms,Wu:2023pzn}
 \\\textit{This is a generic category for direct particle identification and categorization using various detector technologies. Direct means that the particle directly interacts with the detector (in contrast with $b$-tagging).}
@@ -110,7 +110,7 @@
 \\\textit{For supervised learning, the labels $y_i$ are known. In the case that the labels are noisy or only known with some uncertainty, then the learning is called weak supervision. Semi-supervised learning is the related case where labels are known for only a fraction of the training examples.}
 \item \textbf{Unsupervised}~\cite{Mackey:2015hwa,Komiske:2019fks,1797846,Dillon:2019cqt,Cai:2020vzx,Howard:2021pos,Dillon:2021gag}
 \\\textit{When no labels are provided, the learning is called unsupervised.}
- \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu}
+ \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu}
 \\\textit{Instead of learning to distinguish different types of examples, the goal of reinforcement learning is to learn a strategy (policy). 
The prototypical example of reinforcement learning in learning a strategy to play video games using some kind of score as a feedback during the learning.} \item \textbf{Quantum Machine Learning}~\cite{Mott:2017xdb,Zlokapa:2019lvv,Blance:2020nhl,Terashi:2020wfi,Chen:2020zkj,Wu:2020cye,Guan:2020bdl,Chen:2021ouz,Blance:2021gcs,Heredge:2021vww,Wu:2021xsj,Belis:2021zqi,Araz:2021ifk,Bravo-Prieto:2021ehz,Kim:2021wrr,Ngairangbam:2021yma,Gianelle:2022unu,Abel:2022lqr,Araz:2022haf,Delgado:2022aty,Alvi:2022fkk,Peixoto:2022zzk,Araz:2022zxk,Rousselot:2023pcj} \\\textit{Quantum computers are based on unitary operations applied to quantum states. These states live in a vast Hilbert space which may have a usefully large information capacity for machine learning.} @@ -139,19 +139,19 @@ \begin{itemize} \item \textbf{Pileup}~\cite{Komiske:2017ubm,ATL-PHYS-PUB-2019-028,Martinez:2018fwc,Carrazza:2019efs,Maier:2021ymx,Li:2022omf,CRESST:2022qor,Kim:2023koz} \\\textit{A given bunch crossing at the LHC will have many nearly simultaneous proton-proton collisions. 
Only one of those is usually interesting and the rest introduce a source of noise (pileup) that must be mitigating for precise final state reconstruction.} - \item \textbf{Calibration}~\cite{Cheong:2019upg,ATL-PHYS-PUB-2020-001,ATL-PHYS-PUB-2018-013,Hooberman:DLPS2017,Kasieczka:2020vlh,Sirunyan:2019wwa,Baldi:2020hjm,Du:2020pmp,Kieseler:2021jxc,Pollard:2021fqv,Akchurin:2021afn,Kieseler:2020wcq,Akchurin:2021ahx,Diefenthaler:2021rdj,Polson:2021kvr,Micallef:2021src,Arratia:2021tsq,Kronheim:2021hdb,Renteria-Estrada:2021zrd,Pata:2022wam,Chadeeva:2022kay,Dorigo:2022tfi,Alves:2022gnw,Qiu:2022xvr,Akchurin:2022apq,Gambhir:2022gua,Gambhir:2022dut,Valsecchi:2022rla,Leigh:2022lpn,Darulis:2022brn,Ge:2022xrv,Aad:2023ula,Lee:2023jew} + \item \textbf{Calibration}~\cite{Cheong:2019upg,ATL-PHYS-PUB-2020-001,ATL-PHYS-PUB-2018-013,Hooberman:DLPS2017,Kasieczka:2020vlh,Sirunyan:2019wwa,Baldi:2020hjm,Du:2020pmp,Kieseler:2021jxc,Pollard:2021fqv,Akchurin:2021afn,Kieseler:2020wcq,Akchurin:2021ahx,Diefenthaler:2021rdj,Polson:2021kvr,Micallef:2021src,Arratia:2021tsq,Kronheim:2021hdb,Renteria-Estrada:2021zrd,Pata:2022wam,Chadeeva:2022kay,Dorigo:2022tfi,Alves:2022gnw,Qiu:2022xvr,Akchurin:2022apq,Gambhir:2022gua,Gambhir:2022dut,Valsecchi:2022rla,Leigh:2022lpn,Darulis:2022brn,Ge:2022xrv,Aad:2023ula,Lee:2023jew,Basak:2023wzq,Soleymaninia:2023dds,Raine:2023fko} \\\textit{The goal of calibration is to remove the bias (and reduce variance if possible) from detector (or related) effects.} \item \textbf{Recasting}~\cite{Caron:2017hku,Bertone:2016mdy,1806026,Hammad:2022wpq} \\\textit{Even though an experimental analysis may provide a single model-dependent interpretation of the result, the results are likely to have important implications for a variety of other models. 
Recasting is the task of taking a result and interpreting it in the context of a model that was not used for the original analysis.} \item \textbf{Matrix elements}~\cite{Badger:2020uow,Bishara:2019iwh,1804325,Bury:2020ewi,Sombillo:2021yxe,Sombillo:2021rxv,Aylett-Bullock:2021hmo,Maitre:2021uaa,Danziger:2021eeg,Winterhalder:2021ngy,Karl:2022jda,Alnuqaydan:2022ncd,Dersy:2022bym,Badger:2022hwf,Janssen:2023ahv,Maitre:2023dqz} \\\textit{Regression methods can be used as surrogate models for functions that are too slow to evaluate. One important class of functions are matrix elements, which form the core component of cross section calculations in quantum field theory.} - \item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Qiu:2023ihi,Hammal:2023njz} + \item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Qiu:2023ihi,Hammal:2023njz,Shi:2023xfz} \\\textit{The target features could be parameters of a model, which can be learned directly through a regression setup. 
Other forms of inference are described in later sections (which could also be viewed as regression).} \item \textbf{Parton Distribution Functions (and related)}~\cite{DelDebbio:2020rgv,Grigsby:2020auv,Rossi:2020sbh,Carrazza:2021hny,Ball:2021leu,Ball:2021xlu,Khalek:2021gon,Iranipour:2022iak,Gao:2022uhg,Gao:2022srd,Candido:2023utz,Wang:2023nab,Kassabov:2023hbm,Wang:2023poi,Fernando:2023obn} \\\textit{Various machine learning models can provide flexible function approximators, which can be useful for modeling functions that cannot be determined easily from first principles such as parton distribution functions.} - \item \textbf{Lattice Gauge Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf} + \item \textbf{Lattice Gauge Theory}~\cite{Kanwar:2003.06413,Favoni:2020reg,Bulusu:2021rqz,Shi:2021qri,Hackett:2021idh,Yoon:2018krb,Zhang:2019qiq,Nguyen:2019gpo,Favoni:2021epq,Chen:2021jey,Bulusu:2021njs,Shi:2022yqw,Luo:2022jzl,Chen:2022ytr,Li:2022ozl,Kang:2022jbg,Albandea:2022fky,Khan:2022vot,Sale:2022snt,Kim:2022rna,Karsch:2022yka,Favoni:2022mcg,Chen:2022asj,Bacchio:2022vje,Bacchio:2022vje,Gao:2022uhg,Aguilar:2022thg,Lawrence:2022dba,Peng:2022wdl,Lehner:2023bba,Albandea:2023wgd,Nicoli:2023qsl,Aronsson:2023rli,Zhou:2023pti,Hudspith:2023loy,R:2023dcr,Bender:2023gwr,NarcisoFerreira:2023kak,Lehner:2023prf,Riberdy:2023awf} \\\textit{Lattice methods offer a complementary approach to perturbation theory. 
A key challenge is to create approaches that respect the local gauge symmetry (equivariant networks).} - \item \textbf{Function Approximation}~\cite{1853982,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab} + \item \textbf{Function Approximation}~\cite{1853982,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab,Fernando:2023obn} \\\textit{Approximating functions that obey certain (physical) constraints.} \item \textbf{Symbolic Regression}~\cite{Butter:2021rvz,Zhang:2022uqk,Lu:2022joy,Wang:2023poi} \\\textit{Regression where the result is a (relatively) simple formula.} @@ -160,26 +160,26 @@ \end{itemize} \item \textbf{Equivariant networks}~\cite{Kanwar:2003.06413,Dolan:2020qkr,Favoni:2020reg,Bulusu:2021njs,Gong:2022lye,Shi:2022yqw,Bogatskiy:2022hub,Favoni:2022mcg,Bogatskiy:2022czk,Lehner:2023bba,Forestano:2023fpj,Aronsson:2023rli,Buhmann:2023pmh,Forestano:2023qcy,Lehner:2023prf,Murnane:2023kfm} \\\textit{It is often the case that implementing equivariance or learning symmetries with a model better describes the physics and improves performance} -\item \textbf{Decorrelation methods}~\cite{Louppe:2016ylz,Dolen:2016kst,Moult:2017okx,Stevens:2013dya,Shimmin:2017mfk,Bradshaw:2019ipy,ATL-PHYS-PUB-2018-014,DiscoFever,Xia:2018kgd,Englert:2018cfo,Wunsch:2019qbo,Rogozhnikov:2014zea,10.1088/2632-2153/ab9023,clavijo2020adversarial,Kasieczka:2020pil,Kitouni:2020xgb,Ghosh:2021hrh,Dolan:2021pml,Mikuni:2021nwn,Klein:2022hdv,Das:2022cjl} +\item \textbf{Decorrelation methods}~\cite{Louppe:2016ylz,Dolen:2016kst,Moult:2017okx,Stevens:2013dya,Shimmin:2017mfk,Bradshaw:2019ipy,ATL-PHYS-PUB-2018-014,DiscoFever,Xia:2018kgd,Englert:2018cfo,Wunsch:2019qbo,Rogozhnikov:2014zea,10.1088/2632-2153/ab9023,clavijo2020adversarial,Kasieczka:2020pil,Kitouni:2020xgb,Ghosh:2021hrh,Dolan:2021pml,Mikuni:2021nwn,Klein:2022hdv,Das:2022cjl,Algren:2023spv} \\\textit{It it sometimes the case that a classification or regression model 
needs to be independent of a set of features (usually a mass-like variable) in order to estimate the background or otherwise reduce the uncertainty. These techniques are related to what the machine learning literature calls model `fairness'.} \item \textbf{Generative models / density estimation} \\\textit{The goal of generative modeling is to learn (explicitly or implicitly) a probability density $p(x)$ for the features $x\in\mathbb{R}^n$. This task is usually unsupervised (no labels).} \begin{itemize} - \item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl} + \item 
\textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume} \\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. 
When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.} \item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int} \\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the detector to a randomly chosen element of the latent space.} - \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf} + \item \textbf{Normalizing 
flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko} \\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.} \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov} \\\textit{These approaches learn the gradient of the density instead of the density directly.} - \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov} + \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov,Raine:2023fko} \\\textit{These approaches learn the density or perform generative modeling using transformer-based networks.} \item \textbf{Physics-inspired}~\cite{Andreassen:2018apy,Andreassen:2019txo,1808876,Lai:2020byl,Barenboim:2021vzh} \\\textit{A variety of methods have been proposed to use machine learning tools (e.g. neural networks) combined with physical components.} \item \textbf{Mixture Models}~\cite{Chen:2020uds,Burton:2021tsd,Graziani:2021vai,Liu:2022dem} \\\textit{A mixture model is a superposition of simple probability densities. For example, a Gaussian mixture model is a sum of normal probability densities. 
Mixture density networks are mixture models where the coefficients in front of the constituent densities as well as the density parameters (e.g. mean and variances of Gaussians) are parameterized by neural networks.} - \item \textbf{Phase space generation}~\cite{Bendavid:2017zhk,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Klimek:2018mza,Carrazza:2020rdn,Nachman:2020fff,Chen:2020nfb,Verheyen:2020bjw,Backes:2020vka,Danziger:2021eeg,Yoon:2020zmb,Maitre:2022xle,Jinno:2022sbr,Heimel:2022wyj} + \item \textbf{Phase space generation}~\cite{Bendavid:2017zhk,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Klimek:2018mza,Carrazza:2020rdn,Nachman:2020fff,Chen:2020nfb,Verheyen:2020bjw,Backes:2020vka,Danziger:2021eeg,Yoon:2020zmb,Maitre:2022xle,Jinno:2022sbr,Heimel:2022wyj,Renteria-Estrada:2023buo,Singh:2023yvj} \\\textit{Monte Carlo event generators integrate over a phase space that needs to be generated efficiently and this can be aided by machine learning methods.} \item \textbf{Gaussian processes}~\cite{Frate:2017mai,Bertone:2016mdy,1804325,Cisbani:2019xta} \\\textit{These are non-parametric tools for modeling the `time'-dependence of a random variable. 
The `time' need not be actual time - for instance, one can use Gaussian processes to model the energy dependence of some probability density.} diff --git a/README.md b/README.md index 65e8b75..b538e05 100644 --- a/README.md +++ b/README.md @@ -99,6 +99,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Deep learning jet modifications in heavy-ion collisions](https://arxiv.org/abs/2012.07797) * [Identifying the Quantum Properties of Hadronic Resonances using Machine Learning](https://arxiv.org/abs/2105.04582) * [Automatic detection of boosted Higgs and top quark jets in event image](https://arxiv.org/abs/2302.13460) +* [A Guide to Diagnosing Colored Resonances at Hadron Colliders](https://arxiv.org/abs/2306.00079) #### Event images @@ -221,6 +222,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Role of polarizations and spin-spin correlations of W's in e-e+\textrightarrow{}W-W+ at s](https://arxiv.org/abs/2212.12973) [[DOI](https://doi.org/10.1103/PhysRevD.107.073004)] * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) +* [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) #### $H\rightarrow b\bar{b}$ @@ -282,6 +284,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Strange Jet Tagging](https://arxiv.org/abs/2003.09517) * [A tagger for strange jets based on tracking information using long short-term memory](https://arxiv.org/abs/1907.07505) [[DOI](https://doi.org/10.1088/1748-0221/15/01/P01021)] * [Maximum performance of strange-jet tagging at hadron colliders](https://arxiv.org/abs/2011.10736) +* [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized 
beams](https://arxiv.org/abs/2305.15106) #### $b$-tagging @@ -354,6 +357,9 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Probing Dark QCD Sector through the Higgs Portal with Machine Learning at the LHC](https://arxiv.org/abs/2304.03237) * [Uncovering doubly charged scalars with dominant three-body decays using machine learning](https://arxiv.org/abs/2304.09195) * [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) +* [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) +* [Leveraging on-shell interference to search for FCNCs of the top quark and the Z boson](https://arxiv.org/abs/2305.12172) +* [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781) #### Particle identification @@ -600,6 +606,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) +* [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) #### Quantum Machine Learning @@ -759,6 +766,9 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A new method for the $q^2$ reconstruction in semileptonic decays at LHCb based on machine learning](https://arxiv.org/abs/2208.02145) * [Firmware implementation of a recurrent neural network for the computation of the energy deposited in the liquid argon calorimeter of the ATLAS experiment](https://arxiv.org/abs/2302.07555) 
[[DOI](https://doi.org/10.1088/1748-0221/18/05/P05017)] * [Restoring the saturation response of a PMT using pulse shape and artificial neural networks](https://arxiv.org/abs/2302.06170) [[DOI](https://doi.org/10.1093/ptep/ptad047)] +* [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) +* [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) +* [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ### Recasting @@ -798,6 +808,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Machine Learning Assisted Vector Atomic Magnetometry](https://arxiv.org/abs/2301.05707) * [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208) * [Neural Network predictions of inclusive electron-nucleus cross sections](https://arxiv.org/abs/2305.08217) +* [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) ### Parton Distribution Functions (and related) @@ -858,6 +869,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A variational Monte Carlo algorithm for lattice gauge theories with continuous gauge groups: a study of (2+1)-dimensional compact QED with dynamical fermions at finite density](https://arxiv.org/abs/2304.05916) * [Evidence of the Schwinger mechanism from lattice QCD](https://arxiv.org/abs/2304.07800) * [Gauge-equivariant pooling layers for preconditioners in lattice QCD](https://arxiv.org/abs/2304.10438) +* [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) ### 
Function Approximation @@ -868,6 +880,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Robust and Provably Monotonic Networks](https://arxiv.org/abs/2112.00038) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Determination of the distribution of strong coupling constant with machine learning](https://arxiv.org/abs/2303.07968) +* [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) ### Symbolic Regression @@ -926,6 +939,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Online-compatible Unsupervised Non-resonant Anomaly Detection](https://arxiv.org/abs/2111.06417) * [Decorrelation with conditional normalizing flows](https://arxiv.org/abs/2211.02486) * [Feature Selection with Distance Correlation](https://arxiv.org/abs/2212.00046) +* [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) ## Generative models / density estimation ### GANs @@ -998,6 +1012,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Ultrafast CMOS image sensors and data-enabled super-resolution for multimodal radiographic imaging and tomography](https://arxiv.org/abs/2301.11865) [[DOI](https://doi.org/10.22323/1.420.0041)] * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) +* [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) ### Autoencoders @@ -1065,6 +1080,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Detecting and Mitigating Mode-Collapse for Flow-based Sampling of Lattice Field Theories](https://arxiv.org/abs/2302.14082) * [Locality-constrained autoregressive cum 
conditional normalizing flow for lattice field theory simulations](https://arxiv.org/abs/2304.01798) * [ELSA - Enhanced latent spaces for improved collider simulations](https://arxiv.org/abs/2305.07696) +* [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ### Diffusion Models @@ -1079,6 +1095,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) +* [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ### Physics-inspired @@ -1112,6 +1129,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Multi-variable Integration with a Neural Network](https://arxiv.org/abs/2211.02834) * [Machine Learning Post-Minkowskian Integrals](https://arxiv.org/abs/2209.01091) * [MadNIS -- Neural Multi-Channel Importance Sampling](https://arxiv.org/abs/2212.06172) +* [Precision studies for the partonic kinematics calculation through Machine Learning](https://arxiv.org/abs/2305.11369) +* [Predicting the Exclusive Diffractive Electron-Ion Cross Section at small $x$ with Machine Learning in Sar$t$re](https://arxiv.org/abs/2305.15880) ### Gaussian processes diff --git a/docs/index.md b/docs/index.md index a835b16..04037dc 100644 --- a/docs/index.md +++ b/docs/index.md @@ -150,6 +150,7 @@ const expandElements = shouldExpand => { * [Deep learning jet modifications in heavy-ion collisions](https://arxiv.org/abs/2012.07797) * [Identifying the Quantum Properties of Hadronic Resonances using Machine Learning](https://arxiv.org/abs/2105.04582) * [Automatic detection of boosted Higgs and top quark jets in event 
image](https://arxiv.org/abs/2302.13460) + * [A Guide to Diagnosing Colored Resonances at Hadron Colliders](https://arxiv.org/abs/2306.00079) #### Event images @@ -277,6 +278,7 @@ const expandElements = shouldExpand => { * [Role of polarizations and spin-spin correlations of W's in e-e+\textrightarrow{}W-W+ at s](https://arxiv.org/abs/2212.12973) [[DOI](https://doi.org/10.1103/PhysRevD.107.073004)] * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) + * [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) #### $H\rightarrow b\bar{b}$ @@ -338,6 +340,7 @@ const expandElements = shouldExpand => { * [Strange Jet Tagging](https://arxiv.org/abs/2003.09517) * [A tagger for strange jets based on tracking information using long short-term memory](https://arxiv.org/abs/1907.07505) [[DOI](https://doi.org/10.1088/1748-0221/15/01/P01021)] * [Maximum performance of strange-jet tagging at hadron colliders](https://arxiv.org/abs/2011.10736) + * [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams](https://arxiv.org/abs/2305.15106) #### $b$-tagging @@ -410,6 +413,9 @@ const expandElements = shouldExpand => { * [Probing Dark QCD Sector through the Higgs Portal with Machine Learning at the LHC](https://arxiv.org/abs/2304.03237) * [Uncovering doubly charged scalars with dominant three-body decays using machine learning](https://arxiv.org/abs/2304.09195) * [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) + * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) + * [Leveraging on-shell interference to search for FCNCs of the top quark and the Z 
boson](https://arxiv.org/abs/2305.12172) + * [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781) #### Particle identification @@ -661,6 +667,7 @@ const expandElements = shouldExpand => { * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) + * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) #### Quantum Machine Learning @@ -835,6 +842,9 @@ const expandElements = shouldExpand => { * [A new method for the $q^2$ reconstruction in semileptonic decays at LHCb based on machine learning](https://arxiv.org/abs/2208.02145) * [Firmware implementation of a recurrent neural network for the computation of the energy deposited in the liquid argon calorimeter of the ATLAS experiment](https://arxiv.org/abs/2302.07555) [[DOI](https://doi.org/10.1088/1748-0221/18/05/P05017)] * [Restoring the saturation response of a PMT using pulse shape and artificial neural networks](https://arxiv.org/abs/2302.06170) [[DOI](https://doi.org/10.1093/ptep/ptad047)] + * [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) + * [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) + * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ??? 
example "Recasting" @@ -889,6 +899,7 @@ const expandElements = shouldExpand => { * [Machine Learning Assisted Vector Atomic Magnetometry](https://arxiv.org/abs/2301.05707) * [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208) * [Neural Network predictions of inclusive electron-nucleus cross sections](https://arxiv.org/abs/2305.08217) + * [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) ??? example "Parton Distribution Functions (and related)" @@ -959,6 +970,7 @@ const expandElements = shouldExpand => { * [A variational Monte Carlo algorithm for lattice gauge theories with continuous gauge groups: a study of (2+1)-dimensional compact QED with dynamical fermions at finite density](https://arxiv.org/abs/2304.05916) * [Evidence of the Schwinger mechanism from lattice QCD](https://arxiv.org/abs/2304.07800) * [Gauge-equivariant pooling layers for preconditioners in lattice QCD](https://arxiv.org/abs/2304.10438) + * [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) ??? example "Function Approximation" @@ -974,6 +986,7 @@ const expandElements = shouldExpand => { * [Robust and Provably Monotonic Networks](https://arxiv.org/abs/2112.00038) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) * [Determination of the distribution of strong coupling constant with machine learning](https://arxiv.org/abs/2303.07968) + * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) ??? 
example "Symbolic Regression" @@ -1048,6 +1061,7 @@ const expandElements = shouldExpand => { * [Online-compatible Unsupervised Non-resonant Anomaly Detection](https://arxiv.org/abs/2111.06417) * [Decorrelation with conditional normalizing flows](https://arxiv.org/abs/2211.02486) * [Feature Selection with Distance Correlation](https://arxiv.org/abs/2212.00046) + * [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) ## Generative models / density estimation @@ -1125,6 +1139,7 @@ const expandElements = shouldExpand => { * [Ultrafast CMOS image sensors and data-enabled super-resolution for multimodal radiographic imaging and tomography](https://arxiv.org/abs/2301.11865) [[DOI](https://doi.org/10.22323/1.420.0041)] * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) + * [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) ??? example "Autoencoders" @@ -1202,6 +1217,7 @@ const expandElements = shouldExpand => { * [Detecting and Mitigating Mode-Collapse for Flow-based Sampling of Lattice Field Theories](https://arxiv.org/abs/2302.14082) * [Locality-constrained autoregressive cum conditional normalizing flow for lattice field theory simulations](https://arxiv.org/abs/2304.01798) * [ELSA - Enhanced latent spaces for improved collider simulations](https://arxiv.org/abs/2305.07696) + * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ??? 
example "Diffusion Models" @@ -1226,6 +1242,7 @@ const expandElements = shouldExpand => { * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) + * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ??? example "Physics-inspired" @@ -1274,6 +1291,8 @@ const expandElements = shouldExpand => { * [Multi-variable Integration with a Neural Network](https://arxiv.org/abs/2211.02834) * [Machine Learning Post-Minkowskian Integrals](https://arxiv.org/abs/2209.01091) * [MadNIS -- Neural Multi-Channel Importance Sampling](https://arxiv.org/abs/2212.06172) + * [Precision studies for the partonic kinematics calculation through Machine Learning](https://arxiv.org/abs/2305.11369) + * [Predicting the Exclusive Diffractive Electron-Ion Cross Section at small $x$ with Machine Learning in Sar$t$re](https://arxiv.org/abs/2305.15880) ??? example "Gaussian processes" diff --git a/docs/recent.md b/docs/recent.md index 4b1aaee..6ba79c5 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -9,15 +9,28 @@ search: This is an automatically compiled list of papers which have been added to the living review that were made public within the previous 4 months at the time of updating. This is not an exhaustive list of released papers, and is only able to find those which have both year and month data provided in the bib reference. 
+## July 2023 +* [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) +* [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) + ## June 2023 * [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) +* [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) +* [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) ## May 2023 +* [A Guide to Diagnosing Colored Resonances at Hadron Colliders](https://arxiv.org/abs/2306.00079) * [Implementation of a framework for deploying AI inference engines in FPGAs](https://arxiv.org/abs/2305.19455) * [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) * [Measurements of multijet event isotropies using optimal transport with the ATLAS detector](https://arxiv.org/abs/2305.16930) * [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) +* [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) +* [Predicting the Exclusive Diffractive Electron-Ion Cross Section at small $x$ with Machine Learning in Sar$t$re](https://arxiv.org/abs/2305.15880) +* [Study of anomalous $W^-W^+\gamma/Z$ couplings using polarizations and spin correlations in $e^-e^+\to W^-W^+$ with polarized beams](https://arxiv.org/abs/2305.15106) +* [Sharpening the $A\to Z^{(*)}h $ Signature of the Type-II 2HDM at the LHC through Advanced Machine Learning](https://arxiv.org/abs/2305.13781) +* 
[Leveraging on-shell interference to search for FCNCs of the top quark and the Z boson](https://arxiv.org/abs/2305.12172) +* [Precision studies for the partonic kinematics calculation through Machine Learning](https://arxiv.org/abs/2305.11369) * [Search for periodic signals in the dielectron and diphoton invariant mass spectra using 139 fb$^{-1}$ of $pp$ collisions at $\sqrt{s}](https://arxiv.org/abs/2305.10894) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) @@ -28,18 +41,30 @@ This is an automatically compiled list of papers which have been added to the li * [ELSA - Enhanced latent spaces for improved collider simulations](https://arxiv.org/abs/2305.07696) * [Neural Network predictions of inclusive electron-nucleus cross sections](https://arxiv.org/abs/2305.08217) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) +* [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) +* [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) +* [Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited](https://arxiv.org/abs/2305.03113) * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) +* [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD 
analysis](https://arxiv.org/abs/2305.02664) * [Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited](https://arxiv.org/abs/2305.03113) * [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) * [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) + +## April 2023 +* [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) + +## May 2023 +* [MLAnalysis: An open-source program for high energy physics analyses](https://arxiv.org/abs/2305.00964) * [MLAnalysis: An open-source program for high energy physics analyses](https://arxiv.org/abs/2305.00964) ## April 2023 * [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) * [Flow Away your Differences: Conditional Normalizing Flows as an Improvement to Reweighting](https://arxiv.org/abs/2304.14963) +* [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) +* [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) * [Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber](https://arxiv.org/abs/2304.13233) * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) From 2922433147fc99b78abfb774e5984e01271b9c77 Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Wed, 12 Jul 2023 15:10:17 +0200 Subject: [PATCH 2/5] Fixing the bib --- HEPML.bib | 86 -------------------------------------------------- docs/recent.md | 12 ------- 2 files 
changed, 98 deletions(-) diff --git a/HEPML.bib b/HEPML.bib index 0811041..1b5a2a4 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -293,18 +293,6 @@ @article{Athanasakos:2023fhq year = "2023" } -% May 10, 2023 -@article{Aguilar-Saavedra:2023pde, - author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. F.", - title = "{Gradient Boosting MUST taggers for highly-boosted jets}", - eprint = "2305.04957", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - reportNumber = "IFT-UAM/CSIC-23-29", - month = "5", - year = "2023" -} - % May 9, 2023 @article{Buhmann:2023bwk, author = {Buhmann, Erik and Diefenbacher, Sascha and Eren, Engin and Gaede, Frank and Kasieczka, Gregor and Korol, Anatolii and Korcari, William and Kr\"uger, Katja and McKeown, Peter}, @@ -317,16 +305,6 @@ @article{Buhmann:2023bwk year = "2023" } -@article{Bardhan:2023mia, - author = "Bardhan, Debjyoti and Kats, Yevgeny and Wunch, Noam", - title = "{Searching for dark jets with displaced vertices using weakly supervised machine learning}", - eprint = "2305.04372", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "5", - year = "2023" -} - % May 8, 2023 @article{Sengupta:2023xqy, author = "Sengupta, Debajyoti and Klein, Samuel and Raine, John Andrew and Golling, Tobias", @@ -338,16 +316,6 @@ @article{Sengupta:2023xqy year = "2023" } -@article{Dennis:2023kfe, - author = "Dennis, Mitchell T. and Sakstein, Jeremy", - title = "{Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited}", - eprint = "2305.03113", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "5", - year = "2023" -} %%% not sure this is HEP - %May 7, 2023 @article{Aguilar-Saavedra:2023pde, author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. 
F.", @@ -393,16 +361,6 @@ @article{Dennis:2023kfe year = "2023" } -@article{Soleymaninia:2023dds, - author = "Soleymaninia, Maryam and Hashamipour, Hadi and Khanpour, Hamzeh and Shoeib, Samira and Mohamaditabar, Alireza", - title = "{Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis}", - eprint = "2305.02664", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "5", - year = "2023" -} - %May 2, 2023 @article{Shi:2023xfz, author = "Shi, Jun and Gui, Long-Cheng and Liang, Jian and Liu, Guoming", @@ -414,28 +372,6 @@ @article{Shi:2023xfz year = "2023" } -@article{Basak:2023wzq, - author = "Basak, Dipankar and Dey, Kalyan", - title = "{Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning}", - eprint = "2305.00493", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "4", - year = "2023" -} - - -@article{Guo:2023nfu, - author = "Guo, Yu-Chen and Feng, Fan and Di, An and Lu, Shi-Qi and Yang, Ji-Chong", - title = "{MLAnalysis: An open-source program for high energy physics analyses}", - eprint = "2305.00964", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "5", - year = "2023" -} - - %May 1, 2023 @article{Guo:2023nfu, author = "Guo, Yu-Chen and Feng, Fan and Di, An and Lu, Shi-Qi and Yang, Ji-Chong", @@ -482,28 +418,6 @@ @article{Algren:2023qnb year = "2023" } -@article{Fernando:2023obn, - author = "Fernando, I. P. 
and Keller, D.", - title = "{A Modern Global Extraction of the Sivers Function}", - eprint = "2304.14328", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - month = "4", - year = "2023" -} - -@article{Nishimura:2023wdu, - author = "Nishimura, Satsuki and Miyao, Coh and Otsuka, Hajime", - title = "{Exploring the flavor structure of quarks and leptons with reinforcement learning}", - eprint = "2304.14176", - archivePrefix = "arXiv", - primaryClass = "hep-ph", - reportNumber = "KYUSHU-HET-257", - month = "4", - year = "2023" -} - - %Apr. 27, 2023 @article{Nishimura:2023wdu, author = "Nishimura, Satsuki and Miyao, Coh and Otsuka, Hajime", diff --git a/docs/recent.md b/docs/recent.md index 9e316c5..4f207b1 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -41,30 +41,18 @@ This is an automatically compiled list of papers which have been added to the li * [ELSA - Enhanced latent spaces for improved collider simulations](https://arxiv.org/abs/2305.07696) * [Neural Network predictions of inclusive electron-nucleus cross sections](https://arxiv.org/abs/2305.08217) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) -* [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) -* [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) -* [Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited](https://arxiv.org/abs/2305.03113) * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) * 
[Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) * [Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited](https://arxiv.org/abs/2305.03113) -* [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) * [$\Sigma$ Resonances from a Neural Network-based Partial Wave Analysis on $K^-p$ Scattering](https://arxiv.org/abs/2305.01852) - -## April 2023 -* [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) - -## May 2023 -* [MLAnalysis: An open-source program for high energy physics analyses](https://arxiv.org/abs/2305.00964) * [MLAnalysis: An open-source program for high energy physics analyses](https://arxiv.org/abs/2305.00964) ## April 2023 * [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) * [Flow Away your Differences: Conditional Normalizing Flows as an Improvement to Reweighting](https://arxiv.org/abs/2304.14963) -* [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) -* [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) * [Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber](https://arxiv.org/abs/2304.13233) * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) From 7c1f37aa68fba1c196421495b23d375870650a0e Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Wed, 12 Jul 2023 15:32:01 +0200 Subject: [PATCH 3/5] Remove duplicate entry 
in lists --- HEPML.tex | 2 +- README.md | 1 - docs/index.md | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/HEPML.tex b/HEPML.tex index 461e4da..d935e20 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -110,7 +110,7 @@ \\\textit{For supervised learning, the labels $y_i$ are known. In the case that the labels are noisy or only known with some uncertainty, then the learning is called weak supervision. Semi-supervised learning is the related case where labels are known for only a fraction of the training examples.} \item \textbf{Unsupervised}~\cite{Mackey:2015hwa,Komiske:2019fks,1797846,Dillon:2019cqt,Cai:2020vzx,Howard:2021pos,Dillon:2021gag} \\\textit{When no labels are provided, the learning is called unsupervised.} - \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu,Nishimura:2023wdu} + \item \textbf{Reinforcement Learning}~\cite{Carrazza:2019efs,Brehmer:2020brs,John:2020sak,Harvey:2021oue,Cranmer:2021gdt,Windisch:2021mem,Dersy:2022bym,Nishimura:2023wdu} \\\textit{Instead of learning to distinguish different types of examples, the goal of reinforcement learning is to learn a strategy (policy). The prototypical example of reinforcement learning in learning a strategy to play video games using some kind of score as a feedback during the learning.} \item \textbf{Quantum Machine Learning}~\cite{Mott:2017xdb,Zlokapa:2019lvv,Blance:2020nhl,Terashi:2020wfi,Chen:2020zkj,Wu:2020cye,Guan:2020bdl,Chen:2021ouz,Blance:2021gcs,Heredge:2021vww,Wu:2021xsj,Belis:2021zqi,Araz:2021ifk,Bravo-Prieto:2021ehz,Kim:2021wrr,Ngairangbam:2021yma,Gianelle:2022unu,Abel:2022lqr,Araz:2022haf,Delgado:2022aty,Alvi:2022fkk,Peixoto:2022zzk,Araz:2022zxk,Rousselot:2023pcj} \\\textit{Quantum computers are based on unitary operations applied to quantum states. 
These states live in a vast Hilbert space which may have a usefully large information capacity for machine learning.} diff --git a/README.md b/README.md index 9663277..eccf6be 100644 --- a/README.md +++ b/README.md @@ -607,7 +607,6 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) -* [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) #### Quantum Machine Learning diff --git a/docs/index.md b/docs/index.md index 30b4d47..868ff9e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -668,7 +668,6 @@ const expandElements = shouldExpand => { * [A machine learning pipeline for autonomous numerical analytic continuation of Dyson-Schwinger equations](https://arxiv.org/abs/2112.13011) * [Simplifying Polylogarithms with Machine Learning](https://arxiv.org/abs/2206.04115) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) - * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) #### Quantum Machine Learning From e40d87f600b777edc18b2bb7627e7346e3c1bf7c Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Thu, 13 Jul 2023 10:03:00 +0200 Subject: [PATCH 4/5] Updating for hep-ph up to 20230713 --- HEPML.bib | 34 ++++++++++++++++++++++++++++++++++ HEPML.tex | 10 +++++----- README.md | 5 +++++ docs/index.md | 5 +++++ docs/recent.md | 3 +++ 5 files changed, 52 insertions(+), 5 deletions(-) diff --git a/HEPML.bib b/HEPML.bib index 1b5a2a4..56bb0ba 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -11,6 +11,29 @@ 
@article{Algren:2023spv year = "2023" } +% Jul. 10, 2023 +@article{Alghamdi:2023emm, + author = "Alghamdi, T. and others", + title = "{Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction}", + eprint = "2307.04450", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + reportNumber = "JLAB-THY-23-3881", + month = "7", + year = "2023" +} + +@article{He:2023cfc, + author = "He, Minxuan and Wang, Daohan", + title = "{Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer}", + eprint = "2307.04723", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "7", + year = "2023" +} + + % Jul. 5, 2023 @article{Raine:2023fko, author = "Raine, John Andrew and Leigh, Matthew and Zoch, Knut and Golling, Tobias", @@ -54,6 +77,17 @@ @article{Grossi:2023fqq year = "2023" } +% Jun. 6, 2023 +@article{Mikuni:2023tok, + author = "Mikuni, Vinicius and Nachman, Benjamin", + title = "{High-dimensional and Permutation Invariant Anomaly Detection}", + eprint = "2306.03933", + archivePrefix = "arXiv", + primaryClass = "hep-ph", + month = "6", + year = "2023" +} + % Jun. 
5, 2023 @article{Riberdy:2023awf, author = "Riberdy, Michael Joseph and Dutrieux, Herv\'e and Mezrag, C\'edric and Sznajder, Pawe\l{}", diff --git a/HEPML.tex b/HEPML.tex index d935e20..608c0a4 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -76,9 +76,9 @@ \\\textit{Boosted, hadronically decaying $W$ and $Z$ bosons form jets that are distinguished from generic quark and gluon jets by their mass near the boson mass and their two-prong substructure.} \item \textbf{$H\rightarrow b\bar{b}$}~\cite{Datta:2019ndh,Lin:2018cin,Moreno:2019neq,Chakraborty:2019imr,Sirunyan:2020lcu,Chung:2020ysf,Tannenwald:2020mhq,guo2020boosted,Abbas:2020khd,Jang:2021eph,Khosa:2021cyk} \\\textit{Due to the fidelity of $b$-tagging, boosted, hadronically decaying Higgs bosons (predominantly decaying to $b\bar{b}$) has unique challenged and opportunities compared with $W/Z$ tagging.} - \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq} + \item \textbf{quarks and gluons}~\cite{ATL-PHYS-PUB-2017-017,Komiske:2016rsd,Cheng:2017rdo,Stoye:DLPS2017,Chien:2018dfn,Moreno:2019bmu,Kasieczka:2018lwf,1806025,Lee:2019ssx,Lee:2019cad,Dreyer:2020brq,Romero:2021qlf,Filipek:2021qbe,Dreyer:2021hhr,Bright-Thonney:2022xkx,CrispimRomao:2023ssj,Athanasakos:2023fhq,He:2023cfc} \\\textit{Quark jets tend to be narrower and have fewer particles than gluon jets. 
This classification task has been a benchmark for many new machine learning models.} - \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq} + \item \textbf{top quark} tagging~\cite{Almeida:2015jua,Stoye:DLPS2017,Kasieczka:2019dbj,Chakraborty:2020yfc,Diefenbacher:2019ezd,Butter:2017cot,Kasieczka:2017nvn,Macaluso:2018tck,Bhattacharya:2020vzu,Lim:2020igi,Dreyer:2020brq,Aguilar-Saavedra:2021rjk,Andrews:2021ejw,Dreyer:2022yom,Ahmed:2022hct,Munoz:2022gjq,Bhattacherjee:2022gjq,Choi:2023slq,He:2023cfc} \\\textit{Boosted top quarks form jets that have a three-prong substructure ($t\rightarrow Wb,W\rightarrow q\bar{q}$).} \item \textbf{strange jets}~\cite{Nakai:2020kuu,Erdmann:2019blf,Erdmann:2020ovh,Subba:2023rpm} \\\textit{Strange quarks have a very similar fragmentation to generic quark and gluon jets, so this is a particularly challenging task.} @@ -165,13 +165,13 @@ \item \textbf{Generative models / density estimation} \\\textit{The goal of generative modeling is to learn (explicitly or implicitly) a probability density $p(x)$ for the features $x\in\mathbb{R}^n$. 
This task is usually unsupervised (no labels).} \begin{itemize} - \item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume} + \item 
\textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume,Alghamdi:2023emm} \\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. 
When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.} \item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int} \\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the detector to a randomly chosen element of the latent space.} \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko} \\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.} - \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov} + 
\item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok} \\\textit{These approaches learn the gradient of the density instead of the density directly.} \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov,Raine:2023fko} \\\textit{These approaches learn the density or perform generative modeling using transformer-based networks.} @@ -186,7 +186,7 @@ \item \textbf{Other/hybrid}~\cite{Cresswell:2022tof,DiBello:2022rss,Li:2022jon,Butter:2023fov} \\\textit{Architectures that combine different network elements or otherwise do not fit into the other categories.} \end{itemize} -\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw
,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Roche:2023int,Golling:2023juz,Sengupta:2023xqy} +\item \textbf{Anomaly detection}~\cite{DAgnolo:2018cun,Collins:2018epr,Collins:2019jip,DAgnolo:2019vbw,Farina:2018fyg,Heimel:2018mkt,Roy:2019jae,Cerri:2018anq,Blance:2019ibf,Hajer:2018kqm,DeSimone:2018efk,Mullin:2019mmh,1809.02977,Dillon:2019cqt,Andreassen:2020nkr,Nachman:2020lpy,Aguilar-Saavedra:2017rzt,Romao:2019dvs,Romao:2020ojy,knapp2020adversarially,collaboration2020dijet,1797846,1800445,Amram:2020ykb,Cheng:2020dal,Khosa:2020qrz,Thaprasop:2020mzp,Alexander:2020mbx,aguilarsaavedra2020mass,1815227,pol2020anomaly,Mikuni:2020qds,vanBeekveld:2020txa,Park:2020pak,Faroughy:2020gas,Stein:2020rou,Kasieczka:2021xcg,Chakravarti:2021svb,Batson:2021agz,Blance:2021gcs,Bortolato:2021zic,Collins:2021nxn,Dillon:2021nxw,Finke:2021sdf,Shih:2021kbt,Atkinson:2021nlt,Kahn:2021drv,Aarrestad:2021oeb,Dorigo:2021iyy,Caron:2021wmq,Govorkova:2021hqu,Kasieczka:2021tew,Volkovich:2021txe,Govorkova:2021utb,Hallin:2021wme,Ostdiek:2021bem,Fraser:2021lxm,Jawahar:2021vyu,Herrero-Garcia:2021goa,Aguilar-Saavedra:2021utu,Tombs:2021wae,Lester:2021aks,Mikuni:2021nwn,Chekanov:2021pus,dAgnolo:2021aun,Canelli:2021aps,Ngairangbam:2021yma,Bradshaw:2022qev,Aguilar-Saavedra:2022ejy,Buss:2022lxw,Alvi:2022fkk,Dillon:2022tmm,Birman:2022xzu,Raine:2022hht,Letizia:2022xbe,Fanelli:2022xwl,Finke:2022lsu,Verheyen:2022tov,Dillon:2022mkq,Caron:2022wrw,Park:2022zov,Kamenik:2022qxs,Hallin:2022eoq,Kasieczka:2022naq,Araz:2022zxk,Mastandrea:2022vas,Roche:2023int,Golling:2023juz,Sengupta:2023xqy,Mikuni:2023tok} \\\textit{The goal of anomaly detection is to identify abnormal events. The abnormal events could be from physics beyond the Standard Model or from faults in a detector. While nearly all searches for new physics are technically anomaly detection, this category is for methods that are mode-independent (broadly defined). 
Anomalies in high energy physics tend to manifest as over-densities in phase space (often called `population anomalies') in contrast to off-manifold anomalies where you can flag individual examples as anomalous. } \item \textbf{Simulation-based (`likelihood-free') Inference} \\\textit{Likelihood-based inference is the case where $p(x|\theta)$ is known and $\theta$ can be determined by maximizing the probability of the data. In high energy physics, $p(x|\theta)$ is often not known analytically, but it is often possible to sample from the density implicitly using simulations.} diff --git a/README.md b/README.md index eccf6be..edc490d 100644 --- a/README.md +++ b/README.md @@ -258,6 +258,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Systematic Quark/Gluon Identification with Ratios of Likelihoods](https://arxiv.org/abs/2207.12411) * [Jet substructure observables for jet quenching in Quark Gluon Plasma: a Machine Learning driven analysis](https://arxiv.org/abs/2304.07196) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### top quark tagging @@ -279,6 +280,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [BIP: Boost Invariant Polynomials for Efficient Jet Tagging](https://arxiv.org/abs/2207.08272) * [Boosted top tagging and its interpretation using Shapley values](https://arxiv.org/abs/2212.11606) * [Automatic detection of boosted Higgs and top quark jets in event image](https://arxiv.org/abs/2302.13460) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### strange jets @@ -1013,6 +1015,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and 
Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) * [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) +* [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) ### Autoencoders @@ -1090,6 +1093,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ### Transformer Models @@ -1237,6 +1241,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Nanosecond anomaly detection with decision trees for high energy physics and real-time application to exotic Higgs decays](https://arxiv.org/abs/2304.03836) * [The Mass-ive Issue: Anomaly Detection in Jet Physics](https://arxiv.org/abs/2303.14134) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ## Simulation-based (`likelihood-free') Inference ### Parameter estimation diff --git a/docs/index.md b/docs/index.md index 868ff9e..cceca9f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -314,6 +314,7 @@ const expandElements = shouldExpand => { * [Systematic Quark/Gluon Identification with Ratios of Likelihoods](https://arxiv.org/abs/2207.12411) * [Jet substructure observables for jet quenching in Quark Gluon Plasma: a Machine Learning driven 
analysis](https://arxiv.org/abs/2304.07196) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) + * [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### top quark tagging @@ -335,6 +336,7 @@ const expandElements = shouldExpand => { * [BIP: Boost Invariant Polynomials for Efficient Jet Tagging](https://arxiv.org/abs/2207.08272) * [Boosted top tagging and its interpretation using Shapley values](https://arxiv.org/abs/2212.11606) * [Automatic detection of boosted Higgs and top quark jets in event image](https://arxiv.org/abs/2302.13460) + * [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) #### strange jets @@ -1140,6 +1142,7 @@ const expandElements = shouldExpand => { * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) * [Fitting a Deep Generative Hadronization Model](https://arxiv.org/abs/2305.17169) + * [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) ??? example "Autoencoders" @@ -1232,6 +1235,7 @@ const expandElements = shouldExpand => { * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) + * [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ??? 
example "Transformer Models" @@ -1412,6 +1416,7 @@ const expandElements = shouldExpand => { * [Nanosecond anomaly detection with decision trees for high energy physics and real-time application to exotic Higgs decays](https://arxiv.org/abs/2304.03836) * [The Mass-ive Issue: Anomaly Detection in Jet Physics](https://arxiv.org/abs/2303.14134) * [CURTAINs Flows For Flows: Constructing Unobserved Regions with Maximum Likelihood Estimation](https://arxiv.org/abs/2305.04646) + * [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) ## Simulation-based (`likelihood-free') Inference diff --git a/docs/recent.md b/docs/recent.md index 4f207b1..0c92433 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -11,12 +11,15 @@ This is an automatically compiled list of papers which have been added to the li ## July 2023 * [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) +* [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ## June 2023 * [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) * [Amplitude-assisted tagging of longitudinally polarised bosons using wide neural networks](https://arxiv.org/abs/2306.07726) +* [High-dimensional and Permutation Invariant Anomaly Detection](https://arxiv.org/abs/2306.03933) * [Combining lattice QCD and phenomenological inputs on generalised parton distributions at moderate skewness](https://arxiv.org/abs/2306.01647) ## May 
2023 From de2167f84c441212787e8c9ad5b1ee007cba668f Mon Sep 17 00:00:00 2001 From: Johnny Raine Date: Thu, 13 Jul 2023 10:35:49 +0200 Subject: [PATCH 5/5] Update recent --- docs/recent.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/recent.md b/docs/recent.md index dde11a0..64e7ee0 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -14,8 +14,8 @@ This is an automatically compiled list of papers which have been added to the li * [Precise Image Generation on Current Noisy Quantum Computing Devices](https://arxiv.org/abs/2307.05253) * [Decorrelation using Optimal Transport](https://arxiv.org/abs/2307.05187) * [Toward a generative modeling analysis of CLAS exclusive $2\pi$ photoproduction](https://arxiv.org/abs/2307.04450) -* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) * [Comparison of Point Cloud and Image-based Models for Calorimeter Fast Simulation](https://arxiv.org/abs/2307.04780) +* [Quark/Gluon Discrimination and Top Tagging with Dual Attention Transformer](https://arxiv.org/abs/2307.04723) * [$\nu^2$-Flows: Fast and improved neutrino reconstruction in multi-neutrino final states with conditional normalizing flows](https://arxiv.org/abs/2307.02405) ## June 2023