diff --git a/HEPML.bib b/HEPML.bib index c8aad83..d13101c 100644 --- a/HEPML.bib +++ b/HEPML.bib @@ -1,5 +1,48 @@ # HEPML Papers +% Jun. 19, 2023 +@article{Karmakar:2023mhy, + author = "Karmakar, Annesha and Pal, Anikesh and Kumar, G. Anil and Bhavika and Anand, V. and Tyagi, Mohit", + title = "{Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification}", + eprint = "2306.09356", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "6", + year = "2023" +} + +@article{Knipfer:2023zrv, + author = "Knipfer, Marco and Meier, Stefan and Heimerl, Jonas and Hommelhoff, Peter and Gleyzer, Sergei", + title = "{Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors}", + eprint = "2306.09359", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "6", + year = "2023" +} + +% Jun. 1, 2023 +@article{Herbst:2023lug, + author = "Herbst, Ryan and Coffee, Ryan and Fronk, Nathan and Kim, Kukhee and Kim, Kuktae and Ruckman, Larry and Russell, J. J.", + title = "{Implementation of a framework for deploying AI inference engines in FPGAs}", + eprint = "2305.19455", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "5", + year = "2023" +} + +% May 31, 2023 +@article{Chen:2023cim, + author = "Chen, Ziting and Wong, Kin To and Seo, Bojeong and Huang, Mingchen and Parit, Mithilesh K. and Zhen, Haoting and Li, Jensen and Jo, Gyu-Boong", + title = "{Magnetic field regression using artificial neural networks for cold atom experiments}", + eprint = "2305.18822", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "5", + year = "2023" +} + %May 30, 2023 @article{ATLAS:2023mny, author = "{ATLAS Collaboration}", @@ -12,10 +55,19 @@ @article{ATLAS:2023mny year = "2023" } +@article{Napolitano:2023jhg, + author = "Napolitano, F. 
and others", + title = "{Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment}", + eprint = "2305.17153", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "5", + year = "2023" +} + %May 18, 2023 @article{ATLAS:2023hbp, - author = "Aad, Georges and others", - collaboration = "ATLAS", + author = "{ATLAS Collaboration}", title = "{Search for periodic signals in the dielectron and diphoton invariant mass spectra using 139 fb$^{-1}$ of $pp$ collisions at $\sqrt{s} =$ 13 TeV with the ATLAS detector}", eprint = "2305.10894", archivePrefix = "arXiv", @@ -120,6 +172,18 @@ @article{Athanasakos:2023fhq year = "2023" } +% May 9, 2023 +@article{Buhmann:2023bwk, + author = {Buhmann, Erik and Diefenbacher, Sascha and Eren, Engin and Gaede, Frank and Kasieczka, Gregor and Korol, Anatolii and Korcari, William and Kr\"uger, Katja and McKeown, Peter}, + title = "{CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation}", + eprint = "2305.04847", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + reportNumber = "DESY-23-061", + month = "5", + year = "2023" +} + %May 7, 2023 @article{Aguilar-Saavedra:2023pde, author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. F.", @@ -185,6 +249,19 @@ @article{Guo:2023nfu year = "2023" } +@article{Kim:2023koz, + author = "Kim, C. H. and Ahn, S. and Chae, K. Y. and Hooker, J. and Rogachev, G. V.", + title = "{Restoring original signals from pile-up using deep learning}", + eprint = "2304.14496", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + doi = "10.1016/j.nima.2023.168492", + journal = "Nucl. Instrum. Meth. A", + volume = "1055", + pages = "168492", + year = "2023" +} + %Apr. 
30, 2023 @article{Basak:2023wzq, author = "Basak, Dipankar and Dey, Kalyan", @@ -208,6 +285,16 @@ @article{Nishimura:2023wdu year = "2023" } +@article{Wu:2023pzn, + author = "Wu, Huangkai and others", + title = "{Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber}", + eprint = "2304.13233", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "4", + year = "2023" +} + @article{Fernando:2023obn, author = "Fernando, I. P. and Keller, D.", title = "{A Modern Global Extraction of the Sivers Function}", @@ -218,6 +305,17 @@ @article{Fernando:2023obn year = "2023" } +% Apr. 25, 2023 +@article{Ai:2023azx, + author = "Ai, Pengcheng and Xiao, Le and Deng, Zhi and Wang, Yi and Sun, Xiangming and Huang, Guangming and Wang, Dong and Li, Yulei and Ran, Xinchi", + title = "{Label-free timing analysis of modularized nuclear detectors with physics-constrained deep learning}", + eprint = "2304.11930", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "4", + year = "2023" +} + %Apr. 20, 2023 @article{Lehner:2023prf, author = "Lehner, Christoph and Wettig, Tilo", @@ -296,6 +394,17 @@ @article{Bender:2023gwr year = "2023" } +@inproceedings{Murnane:2023kfm, + author = "Murnane, Daniel and Thais, Savannah and Thete, Ameya", + title = "{Equivariant Graph Neural Networks for Charged Particle Tracking}", + booktitle = "{21st International Workshop on Advanced Computing and Analysis Techniques in Physics Research}: {AI meets Reality}", + eprint = "2304.05293", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "4", + year = "2023" +} + %Apr. 
7, 2023 @article{Roche:2023int, author = "Roche, Stephen and Bayer, Quincy and Carlson, Benjamin and Ouligian, William and Serhiayenka, Pavel and Stelzer, Joerg and Hong, Tae Min", @@ -371,6 +480,17 @@ @article{MB:2023edk year = "2023" } +@article{Diefenbacher:2023prl, + author = {Diefenbacher, Sascha and Eren, Engin and Gaede, Frank and Kasieczka, Gregor and Korol, Anatolii and Kr\"uger, Katja and McKeown, Peter and Rustige, Lennart}, + title = "{New Angles on Fast Calorimeter Shower Simulation}", + eprint = "2303.18150", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + reportNumber = "DESY-23-039", + month = "3", + year = "2023" +} + %Mar. 30, 2023 @article{Hudspith:2023loy, author = "Hudspith, R. J. and Mohler, D.", @@ -404,6 +524,26 @@ @article{Liu:2023gpt year = "2023" } +@article{Gronroos:2023qff, + author = {Gr\"onroos, Sonja and Pierini, Maurizio and Chernyavskaya, Nadezda}, + title = "{Automated visual inspection of CMS HGCAL silicon sensor surface using an ensemble of a deep convolutional autoencoder and classifier}", + eprint = "2303.15319", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "3", + year = "2023" +} + +@article{Joshi:2023btt, + author = "Joshi, Bhargav and Li, Taihui and Liang, Buyun and Rusack, Roger and Sun, Ju", + title = "{Predicting the Future of the CMS Detector: Crystal Radiation Damage and Machine Learning at the LHC}", + eprint = "2303.15291", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "3", + year = "2023" +} + %Mar. 
27, 2023 @article{Zhou:2023pti, author = "Zhou, Kai and Wang, Lingxiao and Pang, Long-Gang and Shi, Shuzhe", @@ -437,6 +577,17 @@ @article{Ehrke:2023cpn year = "2023" } +% Mar. 21, 2023 +@article{Kushawaha:2023dms, + author = "Kushawaha, Nilay and Furletova, Yulia and Roy, Ankhi and Romanov, Dmitry", + title = "{Separation of electrons from pions in GEM TRD using deep learning}", + eprint = "2303.10776", + archivePrefix = "arXiv", + primaryClass = "physics.ins-det", + month = "3", + year = "2023" +} + %Mar. 20, 2023 @article{Aronsson:2023rli, author = {Aronsson, Jimmy and M\"uller, David I. and Schuh, Daniel}, diff --git a/HEPML.tex b/HEPML.tex index 7dce348..0c69e94 100644 --- a/HEPML.tex +++ b/HEPML.tex @@ -63,7 +63,7 @@ \\\textit{Data that have a variable with a particular order may be represented as a sequence. Recurrent neural networks are natural tools for processing sequence data. } \item \textbf{Trees}~\cite{Louppe:2017ipp,Cheng:2017rdo,Jercic:2021bfc} \\\textit{Recursive neural networks are natural tools for processing data in a tree structure.} - \item \textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,1808887,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn} + \item 
\textbf{Graphs}~\cite{Henrion:DLPS2017,Ju:2020xty,Abdughani:2018wrw,Martinez:2018fwc,Ren:2019xhp,Moreno:2019bmu,Qasim:2019otl,Chakraborty:2019imr,DiBello:2020bas,Chakraborty:2020yfc,1797439,1801423,1808887,Iiyama:2020wap,1811770,Choma:2020cry,alonsomonsalve2020graph,guo2020boosted,Heintz:2020soy,Verma:2020gnq,Dreyer:2020brq,Qian:2021vnh,Pata:2021oez,Biscarat:2021dlj,Rossi:2021tjf,Hewes:2021heg,Thais:2021qcb,Dezoort:2021kfk,Verma:2021ceh,Hariri:2021clz,Belavin:2021bxb,Atkinson:2021nlt,Konar:2021zdg,Atkinson:2021jnj,Tsan:2021brw,Elabd:2021lgo,Pata:2022wam,Gong:2022lye,Qasim:2022rww,Ma:2022bvt,Bogatskiy:2022czk,Builtjes:2022usj,DiBello:2022iwf,Huang:2023ssr,Forestano:2023fpj,Anisha:2023xmh,Ehrke:2023cpn,Murnane:2023kfm} \\\textit{A graph is a collection of nodes and edges. Graph neural networks are natural tools for processing data in a tree structure.} \item \textbf{Sets (point clouds)}~\cite{Komiske:2018cqr,Qu:2019gqs,Mikuni:2020wpr,Shlomi:2020ufi,Dolan:2020qkr,Fenton:2020woz,Lee:2020qil,collado2021learning,Mikuni:2021pou,Shmakov:2021qdz,Shimmin:2021pkm,ATL-PHYS-PUB-2020-014,Qu:2022mxj,Onyisi:2022hdh,Athanasakos:2023fhq} \\\textit{A point cloud is a (potentially variable-size) set of points in space. Sets are distinguished from sequences in that there is no particular order (i.e. permutation invariance). 
Sets can also be viewed as graphs without edges and so graph methods that can parse variable-length inputs may also be appropriate for set learning, although there are other methods as well.} @@ -88,17 +88,17 @@ \\\textit{This category is for studies related to exclusive particle decays, especially with bottom and charm hadrons.} \item \textbf{BSM particles and models}~\cite{Datta:2019ndh,Baldi:2014kfa,Chakraborty:2019imr,10.1088/2632-2153/ab9023,1792136,1801423,Chang:2020rtc,Cogollo:2020afo,Grossi:2020orx,Ngairangbam:2020ksz,Englert:2020ntw,Freitas:2020ttd,Khosa:2019kxd,Freitas:2019hbk,Stakia:2021pvp,Arganda:2021azw,Jorge:2021vpo,Ren:2021prq,Barron:2021btf,Yang:2021gge,Alvestad:2021sje,Morais:2021ead,Jung:2021tym,Drees:2021oew,Cornell:2021gut,Vidal:2021oed,Beauchesne:2021qrw,Feng:2021eke,Konar:2022bgc,Badea:2022dzb,Freitas:2022cno,Goodsell:2022beo,Lv:2022pme,Ai:2022qvs,Yang:2022fhw,Alasfar:2022vqw,Barbosa:2022mmw,Chiang:2022lsn,Hall:2022bme,Faucett:2022zie,Bhattacharya:2022kje,Bardhan:2022sif,Bhattacharyya:2022umc,Palit:2023dvs,Liu:2023gpt,Pedro:2023sdp,MB:2023edk,Dong:2023nir,Guo:2023jkz,Lu:2023gjk,Flacke:2023eil,Bardhan:2023mia} \\\textit{There are many proposals to train classifiers to enhance the presence of particular new physics models.} - \item \textbf{Particle identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum} + \item \textbf{Particle identification}~\cite{deOliveira:2018lqd,Paganini:DLPS2017,Hooberman:DLPS2017,Belayneh:2019vyx,Qasim:2019otl,Collado:2020fwm,Verma:2021ixg,Graziani:2021vai,Graczykowski:2022zae,Fanelli:2022ifa,Dimitrova:2022uum,Kushawaha:2023dms,Wu:2023pzn} \\\textit{This is a generic category for direct particle identification and categorization using various detector technologies. 
Direct means that the particle directly interacts with the detector (in contrast with $b$-tagging).} - \item \textbf{Neutrino Detectors}~\cite{Aurisano:2016jvx,Acciarri:2016ryt,Hertel:DLPS2017,Adams:2018bvi,Domine:2019zhm,Aiello:2020orq,Adams:2020vlj,Domine:2020tlx,DUNE:2020gpm,DeepLearnPhysics:2020hut,Koh:2020snv,Yu:2020wxu,Psihas:2020pby,alonsomonsalve2020graph,Abratenko:2020pbp,Clerbaux:2020ttg,Liu:2020pzv,Abratenko:2020ocq,Chen:2020zkj,Qian:2021vnh,abbasi2021convolutional,Drielsma:2021jdv,Rossi:2021tjf,Hewes:2021heg,Acciarri:2021oav,Belavin:2021bxb,Maksimovic:2021dmz,Gavrikov:2021ktt,Garcia-Mendez:2021vts,Carloni:2021zbc,MicroBooNE:2021nss,MicroBooNE:2021ojx,Elkarghli:2020owr,DUNE:2022fiy,Lutkus:2022eou,Chappell:2022yxd,Bachlechner:2022cvf,Sogaard:2022qgg,IceCube:2022njh,Bai:2022lbv} + \item \textbf{Neutrino Detectors}~\cite{Aurisano:2016jvx,Acciarri:2016ryt,Hertel:DLPS2017,Adams:2018bvi,Domine:2019zhm,Aiello:2020orq,Adams:2020vlj,Domine:2020tlx,DUNE:2020gpm,DeepLearnPhysics:2020hut,Koh:2020snv,Yu:2020wxu,Psihas:2020pby,alonsomonsalve2020graph,Abratenko:2020pbp,Clerbaux:2020ttg,Liu:2020pzv,Abratenko:2020ocq,Chen:2020zkj,Qian:2021vnh,abbasi2021convolutional,Drielsma:2021jdv,Rossi:2021tjf,Hewes:2021heg,Acciarri:2021oav,Belavin:2021bxb,Maksimovic:2021dmz,Gavrikov:2021ktt,Garcia-Mendez:2021vts,Carloni:2021zbc,MicroBooNE:2021nss,MicroBooNE:2021ojx,Elkarghli:2020owr,DUNE:2022fiy,Lutkus:2022eou,Chappell:2022yxd,Bachlechner:2022cvf,Sogaard:2022qgg,IceCube:2022njh,Bai:2022lbv,Biassoni:2023lih} \\\textit{Neutrino detectors are very large in order to have a sizable rate of neutrino detection. 
The entire neutrino interaction can be characterized to distinguish different neutrino flavors.} \item \textbf{Direct Dark Matter Detectors}~\cite{Ilyasov_2020,Akerib:2020aws,Khosa:2019qgp,Golovatiuk:2021lqn,McDonald:2021hus,Coarasa:2021fpv,Herrero-Garcia:2021goa,Liang:2021nsz,Li:2022tvg,Biassoni:2023lih} \\\textit{Dark matter detectors are similar to neutrino detectors, but aim to achieve `zero' background.} \item \textbf{Cosmology, Astro Particle, and Cosmic Ray physics}~\cite{Ostdiek:2020cqz,Brehmer:2019jyt,Tsai:2020vcx,Verma:2020gnq,Aab:2021rcn,Balazs:2021uhg,gonzalez2021tackling,Conceicao:2021xgn,huang2021convolutionalneuralnetwork,Droz:2021wnh,Han:2021kjx,Arjona:2021hmg,1853992,Shih:2021kbt,Ikeda:2021sxm,Aizpuru:2021vhd,Vago:2021grx,List:2021aer,Kahlhoefer:2021sha,Sabiu:2021aea,Mishra-Sharma:2021nhh,Mishra-Sharma:2021oxe,Bister:2021arb,Chen:2019avc,De:2022sde,Montel:2022fhv,Glauch:2022xth,Sun:2022djj,Abel:2022nje,Zhang:2022djp,Nguyen:2022ldb,Kim:2023wuk} \\\textit{Machine learning is often used in astrophysics and cosmology in different ways than terrestrial particle physics experiments due to a general divide between Bayesian and Frequentist statistics. However, there are many similar tasks and a growing number of proposals designed for one domain that apply to the other. 
See also https://github.com/georgestein/ml-in-cosmology.} - \item \textbf{Tracking}~\cite{Farrell:DLPS2017,Farrell:2018cjr,Amrouche:2019wmx,Ju:2020xty,Akar:2020jti,Shlomi:2020ufi,Choma:2020cry,Siviero:2020tim,Fox:2020hfm,Amrouche:2021tlm,goto2021development,Biscarat:2021dlj,Akar:2021gns,Thais:2021qcb,Ju:2021ayy,Dezoort:2021kfk,Edmonds:2021lzd,Lavrik:2021zgt,Huth:2021zcm,Goncharov:2021wvd,Wang:2022oer,Alonso-Monsalve:2022zlm,Bakina:2022mhs,Akram:2022zmj,Sun:2022bxx,Abidi:2022ogh,Bae:2023eec} + \item \textbf{Tracking}~\cite{Farrell:DLPS2017,Farrell:2018cjr,Amrouche:2019wmx,Ju:2020xty,Akar:2020jti,Shlomi:2020ufi,Choma:2020cry,Siviero:2020tim,Fox:2020hfm,Amrouche:2021tlm,goto2021development,Biscarat:2021dlj,Akar:2021gns,Thais:2021qcb,Ju:2021ayy,Dezoort:2021kfk,Edmonds:2021lzd,Lavrik:2021zgt,Huth:2021zcm,Goncharov:2021wvd,Wang:2022oer,Alonso-Monsalve:2022zlm,Bakina:2022mhs,Akram:2022zmj,Sun:2022bxx,Abidi:2022ogh,Bae:2023eec,Knipfer:2023zrv} \\\textit{Charged particle tracking is a challenging pattern recognition task. 
This category is for various classification tasks associated with tracking, such as seed selection.} - \item \textbf{Heavy Ions / Nuclear Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Mallick:2022alr,Steffanic:2023cyx,Mallick:2023vgi,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Wang:2023muv} + \item \textbf{Heavy Ions / Nuclear Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Mallick:2022alr,Steffanic:2023cyx,Mallick:2023vgi,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Wang:2023muv,Ai:2023azx,Karmakar:2023mhy} \\\textit{Many tools in high energy nuclear physics are similar to high energy particle physics. The physics target of these studies are to understand collective properties of the strong force.} \end{itemize} \item \textbf{Learning strategies} @@ -116,7 +116,7 @@ \\\textit{Quantum computers are based on unitary operations applied to quantum states. 
These states live in a vast Hilbert space which may have a usefully large information capacity for machine learning.} \item \textbf{Feature ranking}~\cite{Faucett:2020vbu,Grojean:2020ech,Das:2022cjl} \\\textit{It is often useful to take a set of input features and rank them based on their usefulness.} - \item \textbf{Attention}~\cite{goto2021development,Finke:2023veq,Qiu:2023ihi} + \item \textbf{Attention}~\cite{goto2021development,Finke:2023veq,Qiu:2023ihi,Biassoni:2023lih} \\\textit{This is an ML tool for helping the network to focus on particularly useful features.} \item \textbf{Regularization}~\cite{Araz:2021wqm,Sforza:2013hua} \\\textit{This is a term referring to any learning strategy that improves the robustness of a classifier to statistical fluctuations in the data and in the model initialization.} @@ -128,7 +128,7 @@ \begin{itemize} \item \textbf{Software}~\cite{Strong:2020mge,Gligorov:2012qt,Weitekamp:DLPS2017,Nguyen:2018ugw,Bourgeois:2018nvk,1792136,Balazs:2021uhg,Rehm:2021zow,Mahesh:2021iph,Amrouche:2021tio,Pol:2021iqw,Goncharov:2021wvd,Saito:2021vpp,Jiang:2022zho,Guo:2023nfu,Tyson:2023zkx} \\\textit{Strategies for efficient inference for a given hardware architecture.} - \item \textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb} + \item 
\textbf{Hardware/firmware}~\cite{Duarte:2018ite,DiGuglielmo:2020eqx,Summers:2020xiy,1808088,Iiyama:2020wap,Mohan:2020vvi,Carrazza:2020qwu,Rankin:2020usv,Heintz:2020soy,Rossi:2020sbh,Aarrestad:2021zos,Hawks:2021ruw,Teixeira:2021yhl,Hong:2021snb,DiGuglielmo:2021ide,Migliorini:2021fuj,Govorkova:2021utb,Elabd:2021lgo,Jwa:2019zlh,Butter:2022lkf,Sun:2022bxx,Khoda:2022dwz,Carlson:2022vac,Abidi:2022ogh,MeyerzuTheenhausen:2022ffb,Herbst:2023lug} \\\textit{Various accelerators have been studied for fast inference that is very important for latency-limited applications like the trigger at collider experiments.} \item \textbf{Deployment}~\cite{Kuznetsov:2020mcj,SunnebornGudnadottir:2021nhk} \\\textit{This category is for the deployment of machine learning interfaces, such as in the cloud.} @@ -137,7 +137,7 @@ \item \textbf{Regression} \\\textit{In contrast to classification, the goal of regression is to learn a function $f:\mathbb{R}^n\rightarrow\mathbb{R}^m$ for input features $x\in\mathbb{R}^n$ and target features $y\in\mathbb{R}^m$. The learning setup is very similar to classification, where the network architectures and loss functions may need to be tweaked. For example, the mean squared error is the most common loss function for regression, but the network output is no longer restricted to be between $0$ and $1$.} \begin{itemize} - \item \textbf{Pileup}~\cite{Komiske:2017ubm,ATL-PHYS-PUB-2019-028,Martinez:2018fwc,Carrazza:2019efs,Maier:2021ymx,Li:2022omf,CRESST:2022qor} + \item \textbf{Pileup}~\cite{Komiske:2017ubm,ATL-PHYS-PUB-2019-028,Martinez:2018fwc,Carrazza:2019efs,Maier:2021ymx,Li:2022omf,CRESST:2022qor,Kim:2023koz} \\\textit{A given bunch crossing at the LHC will have many nearly simultaneous proton-proton collisions. 
Only one of those is usually interesting and the rest introduce a source of noise (pileup) that must be mitigating for precise final state reconstruction.} \item \textbf{Calibration}~\cite{Cheong:2019upg,ATL-PHYS-PUB-2020-001,ATL-PHYS-PUB-2018-013,Hooberman:DLPS2017,Kasieczka:2020vlh,Sirunyan:2019wwa,Baldi:2020hjm,Du:2020pmp,Kieseler:2021jxc,Pollard:2021fqv,Akchurin:2021afn,Kieseler:2020wcq,Akchurin:2021ahx,Diefenthaler:2021rdj,Polson:2021kvr,Micallef:2021src,Arratia:2021tsq,Kronheim:2021hdb,Renteria-Estrada:2021zrd,Pata:2022wam,Chadeeva:2022kay,Dorigo:2022tfi,Alves:2022gnw,Qiu:2022xvr,Akchurin:2022apq,Gambhir:2022gua,Gambhir:2022dut,Valsecchi:2022rla,Leigh:2022lpn,Darulis:2022brn,Ge:2022xrv,Aad:2023ula,Lee:2023jew} \\\textit{The goal of calibration is to remove the bias (and reduce variance if possible) from detector (or related) effects.} @@ -154,24 +154,24 @@ \item \textbf{Function Approximation}~\cite{1853982,Haddadin:2021mmo,Chahrour:2021eiv,Wang:2021jou,Kitouni:2021fkh,Lei:2022dvn,Wang:2023nab} \\\textit{Approximating functions that obey certain (physical) constraints.} \item \textbf{Symbolic Regression}~\cite{Butter:2021rvz,Zhang:2022uqk,Lu:2022joy,Wang:2023poi} - \\\textit{Regression where the result is a (relatively) simple formula.} - \item \textbf{Monitoring}~\cite{Mukund:2023oyy,Matha:2023tmf,CMSMuon:2023czf} + \\\textit{Regression where the result is a (relatively) simple formula.} + \item \textbf{Monitoring}~\cite{Mukund:2023oyy,Matha:2023tmf,CMSMuon:2023czf,Joshi:2023btt,Chen:2023cim} \\\textit{Regression models can be used to monitor experimental setups and sensors.} \end{itemize} -\item \textbf{Equivariant networks}~\cite{Kanwar:2003.06413,Dolan:2020qkr,Favoni:2020reg,Bulusu:2021njs,Gong:2022lye,Shi:2022yqw,Bogatskiy:2022hub,Favoni:2022mcg,Bogatskiy:2022czk,Lehner:2023bba,Forestano:2023fpj,Aronsson:2023rli,Buhmann:2023pmh,Forestano:2023qcy,Lehner:2023prf} +\item \textbf{Equivariant 
networks}~\cite{Kanwar:2003.06413,Dolan:2020qkr,Favoni:2020reg,Bulusu:2021njs,Gong:2022lye,Shi:2022yqw,Bogatskiy:2022hub,Favoni:2022mcg,Bogatskiy:2022czk,Lehner:2023bba,Forestano:2023fpj,Aronsson:2023rli,Buhmann:2023pmh,Forestano:2023qcy,Lehner:2023prf,Murnane:2023kfm} \\\textit{It is often the case that implementing equivariance or learning symmetries with a model better describes the physics and improves performance} \item \textbf{Decorrelation methods}~\cite{Louppe:2016ylz,Dolen:2016kst,Moult:2017okx,Stevens:2013dya,Shimmin:2017mfk,Bradshaw:2019ipy,ATL-PHYS-PUB-2018-014,DiscoFever,Xia:2018kgd,Englert:2018cfo,Wunsch:2019qbo,Rogozhnikov:2014zea,10.1088/2632-2153/ab9023,clavijo2020adversarial,Kasieczka:2020pil,Kitouni:2020xgb,Ghosh:2021hrh,Dolan:2021pml,Mikuni:2021nwn,Klein:2022hdv,Das:2022cjl} \\\textit{It it sometimes the case that a classification or regression model needs to be independent of a set of features (usually a mass-like variable) in order to estimate the background or otherwise reduce the uncertainty. These techniques are related to what the machine learning literature calls model `fairness'.} \item \textbf{Generative models / density estimation} \\\textit{The goal of generative modeling is to learn (explicitly or implicitly) a probability density $p(x)$ for the features $x\in\mathbb{R}^n$. 
This task is usually unsupervised (no labels).} \begin{itemize} - \item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu} + \item 
\textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl} \\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: one that maps noise to structure $G(z)$ and one a classifier (called the discriminator) that learns to distinguish examples generated from $G(z)$ and those generated from the target process. 
When the discriminator is maximally `confused', then the generator is effectively mimicking $p(x)$.} \item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int} \\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are simultaneously trained so that their composition is nearly the identity. When the latent space has a well-defined probability density (as in variational autoencoders), then one can sample from the autoencoder by applying the detector to a randomly chosen element of the latent space.} \item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf} \\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.} - \item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Mikuni:2023dvk,Shmakov:2023kjj,Butter:2023fov} + \item \textbf{Diffusion 
Models}~\cite{Mikuni:2022xry,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov} \\\textit{These approaches learn the gradient of the density instead of the density directly.} \item \textbf{Transformer Models}~\cite{Finke:2023veq,Butter:2023fov} \\\textit{These approaches learn the density or perform generative modeling using transformer-based networks.} @@ -199,7 +199,7 @@ \\\textit{Morphing simulations to look like data is a form of domain adaptation.} \item \textbf{BSM}~\cite{Andreassen:2020nkr,Hollingsworth:2020kjg,Brehmer:2018kdj,Brehmer:2018eca,Brehmer:2018hga,Brehmer:2019xox,Romao:2020ojy,deSouza:2022uhk,GomezAmbrosio:2022mpm,Castro:2022zpq,Anisha:2023xmh,Dennis:2023kfe} \\\textit{This category is for parameter estimation when the parameter is the signal strength of new physics.} - \item \textbf{Differentiable Simulation}~\cite{Heinrich:2022xfa,Nachman:2022jbj,Lei:2022dvn} + \item \textbf{Differentiable Simulation}~\cite{Heinrich:2022xfa,Nachman:2022jbj,Lei:2022dvn,Napolitano:2023jhg} \\\textit{Coding up a simulation using a differentiable programming language like TensorFlow, PyTorch, or JAX.} \end{itemize} \item \textbf{Uncertainty Quantification} @@ -217,7 +217,7 @@ \item \textbf{Experimental results} \\\textit{This section is incomplete as there are many results that directly and indirectly (e.g. via flavor tagging) use modern machine learning techniques. We will try to highlight experimental results that use deep learning in a critical way for the final analysis sensitivity.} \begin{itemize} - \item Performance studies~\cite{CMS:2022prd,Yang:2022dwu,NEOS-II:2022mov,Jiang:2022zho} + \item Performance studies~\cite{CMS:2022prd,Yang:2022dwu,NEOS-II:2022mov,Jiang:2022zho,Gronroos:2023qff} \item Searches and measurements were ML reconstruction is a core component~\cite{MicroBooNE:2021nxr,MicroBooNE:2021jwr,ATLAS:2023hbp}. \item Final analysis discriminate for searches~\cite{Aad:2019yxi,Aad:2020hzm,collaboration2020dijet,Sirunyan:2020hwz}. 
\item Measurements using deep learning directly (not through object reconstruction)~\cite{H1:2021wkz} diff --git a/README.md b/README.md index 481066d..047eda0 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ The purpose of this note is to collect references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`. -This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder +This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. 
## Reviews ### Modern reviews @@ -176,6 +176,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Deep Learning Symmetries and Their Lie Groups, Algebras, and Subalgebras from First Principles](https://arxiv.org/abs/2301.05638) * [On the BSM reach of four top production at the LHC](https://arxiv.org/abs/2302.08281) * [Topological Reconstruction of Particle Physics Processes using Graph Neural Networks](https://arxiv.org/abs/2303.13937) +* [Equivariant Graph Neural Networks for Charged Particle Tracking](https://arxiv.org/abs/2304.05293) #### Sets (point clouds) @@ -367,6 +368,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Using Machine Learning for Particle Identification in ALICE](https://arxiv.org/abs/2204.06900) * [Artificial Intelligence for Imaging Cherenkov Detectors at the EIC](https://arxiv.org/abs/2204.08645) * [Using Artificial Intelligence in the Reconstruction of Signals from the PADME Electromagnetic Calorimeter](https://arxiv.org/abs/2210.00811) [[DOI](https://doi.org/10.3390/instruments6040046)] +* [Separation of electrons from pions in GEM TRD using deep learning](https://arxiv.org/abs/2303.10776) +* [Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber](https://arxiv.org/abs/2304.13233) #### Neutrino Detectors @@ -410,6 +413,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [GraphNeT: Graph neural networks for neutrino telescope event reconstruction](https://arxiv.org/abs/2210.12194) * [Graph Neural Networks for low-energy event classification \& reconstruction in IceCube](https://arxiv.org/abs/2209.03042) [[DOI](https://doi.org/10.1088/1748-0221/17/11/P11003)] * [Probing the mixing parameter |V\ensuremath{\tau}N|2 for heavy neutrinos](https://arxiv.org/abs/2211.00309) [[DOI](https://doi.org/10.1103/PhysRevD.107.095008)] +* [Assessment of few-hits machine learning 
classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) #### Direct Dark Matter Detectors @@ -488,6 +492,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Fast muon tracking with machine learning implemented in FPGA](https://arxiv.org/abs/2202.04976) [[DOI](https://doi.org/10.1016/j.nima.2022.167546)] * [Charged Particle Tracking with Machine Learning on FPGAs](https://arxiv.org/abs/2212.02348) * [Reconstruction of fast neutron direction in segmented organic detectors using deep learning](https://arxiv.org/abs/2301.10796) [[DOI](https://doi.org/10.1016/j.nima.2023.168024)] +* [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) #### Heavy Ions / Nuclear Physics @@ -540,6 +545,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) * [Demonstration of Sub-micron UCN Position Resolution using Room-temperature CMOS Sensor](https://arxiv.org/abs/2305.09562) * [A machine learning study to identify collective flow in small and large colliding systems](https://arxiv.org/abs/2305.09937) +* [Label-free timing analysis of modularized nuclear detectors with physics-constrained deep learning](https://arxiv.org/abs/2304.11930) +* [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) ### Learning strategies @@ -632,6 +639,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Development of a Vertex Finding Algorithm using Recurrent Neural Network](https://arxiv.org/abs/2101.11906) * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Parton Labeling without Matching: Unveiling 
Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208) +* [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) #### Regularization @@ -697,6 +705,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Nanosecond machine learning regression with deep boosted decision trees in FPGA for high energy physics](https://arxiv.org/abs/2207.05602) * [Charged Particle Tracking with Machine Learning on FPGAs](https://arxiv.org/abs/2212.02348) * [Neural-network-based level-1 trigger upgrade for the SuperCDMS experiment at SNOLAB](https://arxiv.org/abs/2212.07864) [[DOI](https://doi.org/10.1088/1748-0221/18/06/P06012)] +* [Implementation of a framework for deploying AI inference engines in FPGAs](https://arxiv.org/abs/2305.19455) #### Deployment @@ -713,6 +722,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Pile-Up Mitigation using Attention](https://arxiv.org/abs/2107.02779) * [Semi-supervised Graph Neural Networks for Pileup Noise Removal](https://arxiv.org/abs/2203.15823) * [Towards an automated data cleaning with deep learning in CRESST](https://arxiv.org/abs/2211.00564) [[DOI](https://doi.org/10.1140/epjp/s13360-023-03674-2)] +* [Restoring original signals from pile-up using deep learning](https://arxiv.org/abs/2304.14496) [[DOI](https://doi.org/10.1016/j.nima.2023.168492)] ### Calibration @@ -871,6 +881,8 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [First demonstration of neural sensing and control in a kilometer-scale gravitational wave observatory](https://arxiv.org/abs/2301.06221) * [High-availability displacement sensing with multi-channel self mixing interferometry](https://arxiv.org/abs/2302.00065) [[DOI](https://doi.org/10.1364/OE.485955)] * [Machine Learning based tool for CMS RPC currents quality 
monitoring](https://arxiv.org/abs/2302.02764) [[DOI](https://doi.org/10.1016/j.nima.2023.168449)] +* [Predicting the Future of the CMS Detector: Crystal Radiation Damage and Machine Learning at the LHC](https://arxiv.org/abs/2303.15291) +* [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) ## Equivariant networks. @@ -889,6 +901,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [EPiC-GAN: Equivariant Point Cloud Generation for Particle Jets](https://arxiv.org/abs/2301.08128) * [Discovering Sparse Representations of Lie Groups with Machine Learning](https://arxiv.org/abs/2302.05383) * [Gauge-equivariant pooling layers for preconditioners in lattice QCD](https://arxiv.org/abs/2304.10438) +* [Equivariant Graph Neural Networks for Charged Particle Tracking](https://arxiv.org/abs/2304.05293) ## Decorrelation methods. @@ -984,6 +997,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [EPiC-GAN: Equivariant Point Cloud Generation for Particle Jets](https://arxiv.org/abs/2301.08128) * [Ultrafast CMOS image sensors and data-enabled super-resolution for multimodal radiographic imaging and tomography](https://arxiv.org/abs/2301.11865) [[DOI](https://doi.org/10.22323/1.420.0041)] * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) +* [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) ### Autoencoders @@ -1057,6 +1071,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Score-based Generative Models for Calorimeter Shower Simulation](https://arxiv.org/abs/2206.11898) * [Fast Point Cloud Generation with Diffusion Models in High Energy Physics](https://arxiv.org/abs/2304.01266) * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy 
Physics](https://arxiv.org/abs/2305.10399) +* [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) ### Transformer Models @@ -1282,6 +1297,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [Differentiable Matrix Elements with MadJax](https://arxiv.org/abs/2203.00057) * [Morphing parton showers with event derivatives](https://arxiv.org/abs/2208.02274) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) +* [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) ## Uncertainty Quantification ### Interpretability @@ -1334,6 +1350,7 @@ This review was built with the help of the HEP-ML community, the [INSPIRE REST A * [A feasibility study of multi-electrode high-purity germanium detector for $^{76}$Ge neutrinoless double beta decay searching](https://arxiv.org/abs/2211.06180) [[DOI](https://doi.org/10.1088/1748-0221/18/05/P05025)] * [Pulse shape discrimination using a convolutional neural network for organic liquid scintillator signals](https://arxiv.org/abs/2211.07892) [[DOI](https://doi.org/10.1088/1748-0221/18/03/P03003)] * [Deep machine learning for the PANDA software trigger](https://arxiv.org/abs/2211.15390) [[DOI](https://doi.org/10.1140/epjc/s10052-023-11494-y)] +* [Automated visual inspection of CMS HGCAL silicon sensor surface using an ensemble of a deep convolutional autoencoder and classifier](https://arxiv.org/abs/2303.15319) ### Searches and measurements were ML reconstruction is a core component diff --git a/docs/about.md b/docs/about.md index bb5dd5e..e9ca6e8 100644 --- a/docs/about.md +++ b/docs/about.md @@ -5,5 +5,5 @@ hide: The purpose of this note is to collect 
references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`. -This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder +This review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder. 
diff --git a/docs/index.md b/docs/index.md index 05ca86f..4daef3d 100644 --- a/docs/index.md +++ b/docs/index.md @@ -227,6 +227,7 @@ const expandElements = shouldExpand => { * [Deep Learning Symmetries and Their Lie Groups, Algebras, and Subalgebras from First Principles](https://arxiv.org/abs/2301.05638) * [On the BSM reach of four top production at the LHC](https://arxiv.org/abs/2302.08281) * [Topological Reconstruction of Particle Physics Processes using Graph Neural Networks](https://arxiv.org/abs/2303.13937) + * [Equivariant Graph Neural Networks for Charged Particle Tracking](https://arxiv.org/abs/2304.05293) #### Sets (point clouds) @@ -423,6 +424,8 @@ const expandElements = shouldExpand => { * [Using Machine Learning for Particle Identification in ALICE](https://arxiv.org/abs/2204.06900) * [Artificial Intelligence for Imaging Cherenkov Detectors at the EIC](https://arxiv.org/abs/2204.08645) * [Using Artificial Intelligence in the Reconstruction of Signals from the PADME Electromagnetic Calorimeter](https://arxiv.org/abs/2210.00811) [[DOI](https://doi.org/10.3390/instruments6040046)] + * [Separation of electrons from pions in GEM TRD using deep learning](https://arxiv.org/abs/2303.10776) + * [Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber](https://arxiv.org/abs/2304.13233) #### Neutrino Detectors @@ -466,6 +469,7 @@ const expandElements = shouldExpand => { * [GraphNeT: Graph neural networks for neutrino telescope event reconstruction](https://arxiv.org/abs/2210.12194) * [Graph Neural Networks for low-energy event classification \& reconstruction in IceCube](https://arxiv.org/abs/2209.03042) [[DOI](https://doi.org/10.1088/1748-0221/17/11/P11003)] * [Probing the mixing parameter |V\ensuremath{\tau}N|2 for heavy neutrinos](https://arxiv.org/abs/2211.00309) [[DOI](https://doi.org/10.1103/PhysRevD.107.095008)] + * [Assessment of few-hits machine learning classification algorithms for low 
energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) #### Direct Dark Matter Detectors @@ -544,6 +548,7 @@ const expandElements = shouldExpand => { * [Fast muon tracking with machine learning implemented in FPGA](https://arxiv.org/abs/2202.04976) [[DOI](https://doi.org/10.1016/j.nima.2022.167546)] * [Charged Particle Tracking with Machine Learning on FPGAs](https://arxiv.org/abs/2212.02348) * [Reconstruction of fast neutron direction in segmented organic detectors using deep learning](https://arxiv.org/abs/2301.10796) [[DOI](https://doi.org/10.1016/j.nima.2023.168024)] + * [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) #### Heavy Ions / Nuclear Physics @@ -596,6 +601,8 @@ const expandElements = shouldExpand => { * [Nuclear corrections on the charged hadron fragmentation functions in a Neural Network global QCD analysis](https://arxiv.org/abs/2305.02664) * [Demonstration of Sub-micron UCN Position Resolution using Room-temperature CMOS Sensor](https://arxiv.org/abs/2305.09562) * [A machine learning study to identify collective flow in small and large colliding systems](https://arxiv.org/abs/2305.09937) + * [Label-free timing analysis of modularized nuclear detectors with physics-constrained deep learning](https://arxiv.org/abs/2304.11930) + * [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) ??? 
example "Learning strategies" @@ -693,6 +700,7 @@ const expandElements = shouldExpand => { * [Development of a Vertex Finding Algorithm using Recurrent Neural Network](https://arxiv.org/abs/2101.11906) * [Learning the language of QCD jets with transformers](https://arxiv.org/abs/2303.07364) * [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208) + * [Assessment of few-hits machine learning classification algorithms for low energy physics in liquid argon detectors](https://arxiv.org/abs/2305.09744) #### Regularization @@ -763,6 +771,7 @@ const expandElements = shouldExpand => { * [Nanosecond machine learning regression with deep boosted decision trees in FPGA for high energy physics](https://arxiv.org/abs/2207.05602) * [Charged Particle Tracking with Machine Learning on FPGAs](https://arxiv.org/abs/2212.02348) * [Neural-network-based level-1 trigger upgrade for the SuperCDMS experiment at SNOLAB](https://arxiv.org/abs/2212.07864) [[DOI](https://doi.org/10.1088/1748-0221/18/06/P06012)] + * [Implementation of a framework for deploying AI inference engines in FPGAs](https://arxiv.org/abs/2305.19455) #### Deployment @@ -784,6 +793,7 @@ const expandElements = shouldExpand => { * [Pile-Up Mitigation using Attention](https://arxiv.org/abs/2107.02779) * [Semi-supervised Graph Neural Networks for Pileup Noise Removal](https://arxiv.org/abs/2203.15823) * [Towards an automated data cleaning with deep learning in CRESST](https://arxiv.org/abs/2211.00564) [[DOI](https://doi.org/10.1140/epjp/s13360-023-03674-2)] + * [Restoring original signals from pile-up using deep learning](https://arxiv.org/abs/2304.14496) [[DOI](https://doi.org/10.1016/j.nima.2023.168492)] ??? 
example "Calibration" @@ -987,6 +997,8 @@ const expandElements = shouldExpand => { * [First demonstration of neural sensing and control in a kilometer-scale gravitational wave observatory](https://arxiv.org/abs/2301.06221) * [High-availability displacement sensing with multi-channel self mixing interferometry](https://arxiv.org/abs/2302.00065) [[DOI](https://doi.org/10.1364/OE.485955)] * [Machine Learning based tool for CMS RPC currents quality monitoring](https://arxiv.org/abs/2302.02764) [[DOI](https://doi.org/10.1016/j.nima.2023.168449)] + * [Predicting the Future of the CMS Detector: Crystal Radiation Damage and Machine Learning at the LHC](https://arxiv.org/abs/2303.15291) + * [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) ## Equivariant networks. @@ -1008,6 +1020,7 @@ const expandElements = shouldExpand => { * [EPiC-GAN: Equivariant Point Cloud Generation for Particle Jets](https://arxiv.org/abs/2301.08128) * [Discovering Sparse Representations of Lie Groups with Machine Learning](https://arxiv.org/abs/2302.05383) * [Gauge-equivariant pooling layers for preconditioners in lattice QCD](https://arxiv.org/abs/2304.10438) + * [Equivariant Graph Neural Networks for Charged Particle Tracking](https://arxiv.org/abs/2304.05293) ## Decorrelation methods. @@ -1111,6 +1124,7 @@ const expandElements = shouldExpand => { * [EPiC-GAN: Equivariant Point Cloud Generation for Particle Jets](https://arxiv.org/abs/2301.08128) * [Ultrafast CMOS image sensors and data-enabled super-resolution for multimodal radiographic imaging and tomography](https://arxiv.org/abs/2301.11865) [[DOI](https://doi.org/10.22323/1.420.0041)] * [Ultra-High-Resolution Detector Simulation with Intra-Event Aware GAN and Self-Supervised Relational Reasoning](https://arxiv.org/abs/2303.08046) + * [New Angles on Fast Calorimeter Shower Simulation](https://arxiv.org/abs/2303.18150) ??? 
example "Autoencoders" @@ -1199,6 +1213,7 @@ const expandElements = shouldExpand => { * [Score-based Generative Models for Calorimeter Shower Simulation](https://arxiv.org/abs/2206.11898) * [Fast Point Cloud Generation with Diffusion Models in High Energy Physics](https://arxiv.org/abs/2304.01266) * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) + * [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) @@ -1482,6 +1497,7 @@ const expandElements = shouldExpand => { * [Differentiable Matrix Elements with MadJax](https://arxiv.org/abs/2203.00057) * [Morphing parton showers with event derivatives](https://arxiv.org/abs/2208.02274) * [Implicit Neural Representation as a Differentiable Surrogate for Photon Propagation in a Monolithic Neutrino Detector](https://arxiv.org/abs/2211.01505) + * [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) ## Uncertainty Quantification @@ -1559,6 +1575,7 @@ const expandElements = shouldExpand => { * [A feasibility study of multi-electrode high-purity germanium detector for $^{76}$Ge neutrinoless double beta decay searching](https://arxiv.org/abs/2211.06180) [[DOI](https://doi.org/10.1088/1748-0221/18/05/P05025)] * [Pulse shape discrimination using a convolutional neural network for organic liquid scintillator signals](https://arxiv.org/abs/2211.07892) [[DOI](https://doi.org/10.1088/1748-0221/18/03/P03003)] * [Deep machine learning for the PANDA software trigger](https://arxiv.org/abs/2211.15390) [[DOI](https://doi.org/10.1140/epjc/s10052-023-11494-y)] + * [Automated visual inspection of CMS HGCAL silicon sensor surface using an ensemble of a deep convolutional autoencoder and 
classifier](https://arxiv.org/abs/2303.15319) ??? example "Searches and measurements were ML reconstruction is a core component" diff --git a/docs/recent.md b/docs/recent.md index 3864a4c..837b36f 100644 --- a/docs/recent.md +++ b/docs/recent.md @@ -9,8 +9,15 @@ search: This is an automatically compiled list of papers which have been added to the living review that were made public within the previous 4 months at the time of updating. This is not an exhaustive list of released papers, and is only able to find those which have both year and month data provided in the bib reference. +## June 2023 +* [Neutron-Gamma Pulse Shape Discrimination for Organic Scintillation Detector using 2D CNN based Image Classification](https://arxiv.org/abs/2306.09356) +* [Deep Learning-Based Spatiotemporal Multi-Event Reconstruction for Delay Line Detectors](https://arxiv.org/abs/2306.09359) + ## May 2023 +* [Implementation of a framework for deploying AI inference engines in FPGAs](https://arxiv.org/abs/2305.19455) +* [Magnetic field regression using artificial neural networks for cold atom experiments](https://arxiv.org/abs/2305.18822) * [Measurements of multijet event isotropies using optimal transport with the ATLAS detector](https://arxiv.org/abs/2305.16930) +* [Novel Machine Learning and Differentiable Programming Techniques applied to the VIP-2 Underground Experiment](https://arxiv.org/abs/2305.17153) * [Search for periodic signals in the dielectron and diphoton invariant mass spectra using 139 fb$^{-1}$ of $pp$ collisions at $\sqrt{s}](https://arxiv.org/abs/2305.10894) * [Jet Diffusion versus JetGPT -- Modern Networks for the LHC](https://arxiv.org/abs/2305.10475) * [End-To-End Latent Variational Diffusion Models for Inverse Problems in High Energy Physics](https://arxiv.org/abs/2305.10399) @@ -21,6 +28,7 @@ This is an automatically compiled list of papers which have been added to the li * [ELSA - Enhanced latent spaces for improved collider 
simulations](https://arxiv.org/abs/2305.07696) * [Neural Network predictions of inclusive electron-nucleus cross sections](https://arxiv.org/abs/2305.08217) * [Is infrared-collinear safe information all you need for jet classification?](https://arxiv.org/abs/2305.08979) +* [CaloClouds: Fast Geometry-Independent Highly-Granular Calorimeter Simulation](https://arxiv.org/abs/2305.04847) * [Gradient Boosting MUST taggers for highly-boosted jets](https://arxiv.org/abs/2305.04957) * [Searching for dark jets with displaced vertices using weakly supervised machine learning](https://arxiv.org/abs/2305.04372) * [Tip of the Red Giant Branch Bounds on the Axion-Electron Coupling Revisited](https://arxiv.org/abs/2305.03113) @@ -31,7 +39,9 @@ This is an automatically compiled list of papers which have been added to the li ## April 2023 * [Estimation of collision centrality in terms of the number of participating nucleons in heavy-ion collisions using deep learning](https://arxiv.org/abs/2305.00493) * [Exploring the flavor structure of quarks and leptons with reinforcement learning](https://arxiv.org/abs/2304.14176) +* [Machine learning method for $^{12}$C event classification and reconstruction in the active target time-projection chamber](https://arxiv.org/abs/2304.13233) * [A Modern Global Extraction of the Sivers Function](https://arxiv.org/abs/2304.14328) +* [Label-free timing analysis of modularized nuclear detectors with physics-constrained deep learning](https://arxiv.org/abs/2304.11930) * [Gauge-equivariant pooling layers for preconditioners in lattice QCD](https://arxiv.org/abs/2304.10438) * [Uncovering doubly charged scalars with dominant three-body decays using machine learning](https://arxiv.org/abs/2304.09195) * [Parton Labeling without Matching: Unveiling Emergent Labelling Capabilities in Regression Models](https://arxiv.org/abs/2304.09208) @@ -39,6 +49,7 @@ This is an automatically compiled list of papers which have been added to the li * [Research on the 
distribution formula of QCD strong coupling constant in medium and high energy scale region based on symbolic regression algorithm](https://arxiv.org/abs/2304.07682) * [Jet substructure observables for jet quenching in Quark Gluon Plasma: a Machine Learning driven analysis](https://arxiv.org/abs/2304.07196) * [A variational Monte Carlo algorithm for lattice gauge theories with continuous gauge groups: a study of (2+1)-dimensional compact QED with dynamical fermions at finite density](https://arxiv.org/abs/2304.05916) +* [Equivariant Graph Neural Networks for Charged Particle Tracking](https://arxiv.org/abs/2304.05293) * [Nanosecond anomaly detection with decision trees for high energy physics and real-time application to exotic Higgs decays](https://arxiv.org/abs/2304.03836) * [Probing Dark QCD Sector through the Higgs Portal with Machine Learning at the LHC](https://arxiv.org/abs/2304.03237) * [Locality-constrained autoregressive cum conditional normalizing flow for lattice field theory simulations](https://arxiv.org/abs/2304.01798) diff --git a/make_md.py b/make_md.py index 0180472..9675a07 100644 --- a/make_md.py +++ b/make_md.py @@ -29,7 +29,7 @@ for file in myfile_readme,myfile_about: file.write(r"The purpose of this note is to collect references for modern machine learning as applied to particle physics. A minimal number of categories is chosen in order to be as useful as possible. Note that papers may be referenced in more than one category. The fact that a paper is listed in this document does not endorse or validate its content - that is for the community (and for peer-review) to decide. Furthermore, the classification here is a best attempt and may have flaws - please let us know if (a) we have missed a paper you think should be included, (b) a paper has been misclassified, or (c) a citation for a paper is not correct or if the journal information is now available. 
In order to be as useful as possible, this document will continue to evolve so please check back before you write your next paper. If you find this review helpful, please consider citing it using ```\cite{hepmllivingreview}``` in `HEPML.bib`.") - file.write("\n\nThis review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder\n\n") + file.write("\n\nThis review was built with the help of the HEP-ML community, the [INSPIRE REST API](https://github.com/inspirehep/rest-api-doc), and the moderators Benjamin Nachman, Matthew Feickert, Etienne Dreyer, Waleed Esmail, Michele Faucci Giannelli, Claudius Krause, Johnny Raine, Dalila Salamani, and Ramon Winterhalder.\n\n") ###Add buttons myfile_out.write("""\nExpand all sections\nCollapse all sections\n""")