Update nucl-th from May and June
ramonpeter committed Aug 22, 2023
1 parent dada313 commit 3fa500e
Showing 5 changed files with 179 additions and 12 deletions.
140 changes: 136 additions & 4 deletions HEPML.bib
@@ -314,6 +314,16 @@ @article{Kronheim:2023jrl
}

% Jun. 23, 2023
@article{Wen:2023oju,
author = "Wen, Pengsheng and Holt, Jeremy W. and Li, Maggie",
title = "{Generative modeling of nucleon-nucleon interactions}",
eprint = "2306.13007",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "6",
year = "2023"
}

@article{Dubinski:2023fsy,
author = "Dubi\'nski, Jan and Deja, Kamil and Wenzel, Sandro and Rokita, Przemys\l{}aw and Trzci\'nski, Tomasz",
title = "{Machine Learning methods for simulating particle response in the Zero Degree Calorimeter at the ALICE experiment, CERN}",
@@ -336,6 +346,27 @@ @article{Anzalone:2023ugq
}

% Jun. 21, 2023
@article{Hizawa:2023plv,
author = "Hizawa, N. and Hagino, K. and Yoshida, K.",
title = "{Analysis of a Skyrme energy density functional with deep learning}",
eprint = "2306.11314",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
reportNumber = "KUNS-2970",
month = "6",
year = "2023"
}

@article{Liu:2023xgl,
author = "Liu, Siyu and Gao, Zepeng and Liao, Zehong and Yang, Yu and Su, Jun and Wang, Yongjia and Zhu, Long",
title = "{Constraining the Woods-Saxon potential in fusion reactions based on a physics-informed neural network}",
eprint = "2306.11236",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "6",
year = "2023"
}

@article{Heinrich:2023bmt,
author = "Heinrich, Lukas and Mishra-Sharma, Siddharth and Pollard, Chris and Windischhofer, Philipp",
title = "{Hierarchical Neural Simulation-Based Inference Over Event Ensembles}",
@@ -379,6 +410,41 @@ @article{Knipfer:2023zrv
year = "2023"
}

% Jun. 16, 2023
@article{Yoshida:2023wrb,
author = "Yoshida, Sota",
title = "{IMSRG-Net: A machine learning-based solver for In-Medium Similarity Renormalization Group}",
eprint = "2306.08878",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "6",
year = "2023"
}

@article{Lasseri:2023dhi,
author = {Lasseri, Rapha\"el-David and Regnier, David and Frosini, Mika\"el and Verriere, Marc and Schunck, Nicolas},
title = "{Generative deep-learning reveals collective variables of Fermionic systems}",
eprint = "2306.08348",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "6",
year = "2023"
}

@article{Cai:2023gol,
author = "Cai, Bao-Jun and Li, Bao-An and Zhang, Zhen",
title = "{Core States of Neutron Stars from Anatomizing Their Scaled Structure Equations}",
eprint = "2306.08202",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
doi = "10.3847/1538-4357/acdef0",
journal = "Astrophys. J.",
volume = "952",
number = "2",
pages = "147",
year = "2023"
}

% Jun. 14, 2023
@article{Grossi:2023fqq,
author = "Grossi, Michele and Incudini, Massimiliano and Pellen, Mathieu and Pelliccioli, Giovanni",
@@ -390,6 +456,29 @@ @article{Grossi:2023fqq
year = "2023"
}

% Jun. 13, 2023
@article{Carvalho:2023ele,
author = "Carvalho, Val\'eria and Ferreira, M\'arcio and Malik, Tuhin and Provid\^encia, Constan\c{c}a",
title = "{Decoding Neutron Star Observations: Revealing Composition through Bayesian Neural Networks}",
eprint = "2306.06929",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "6",
year = "2023"
}

% June 08, 2023
@article{Yiu:2023ido,
author = "Yiu, To Chung and Liang, Haozhao and Lee, Jenny",
title = "{Nuclear mass predictions based on deep neural network and finite-range droplet model (2012)}",
eprint = "2306.04171",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
reportNumber = "RIKEN-iTHEMS-Report-23",
month = "6",
year = "2023"
}

% June 07, 2023
@article{Zuniga-Galindo:2023uwp,
author = "Z\'u\~niga-Galindo, W. A.",
@@ -488,6 +577,21 @@ @article{Chan:2023ume
year = "2023"
}

% May 29, 2023
@article{Wang:2023kcg,
author = "Wang, Yongjia and Li, Qingfeng",
title = "{Machine learning transforms the inference of the nuclear equation of state}",
eprint = "2305.16686",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
doi = "10.1007/s11467-023-1313-3",
journal = "Front. Phys. (Beijing)",
volume = "18",
number = "6",
pages = "64402",
year = "2023"
}

% May 26, 2023
@article{Singh:2023yvj,
author = "Singh, Jaswant and Toll, Tobias",
@@ -639,13 +743,17 @@ @article{Nachman:2023clf
year = "2023"
}

-@article{Hammal:2023njz,
-author = "Hammal, O. Al and Martini, M. and Frontera-Pons, J. and Nguyen, T. H. and Perez-Ramos, R.",
-title = "{Neural Network predictions of inclusive electron-nucleus cross sections}",
+@article{AlHammal:2023svo,
+author = "Al Hammal, O. and Martini, M. and Frontera-Pons, J. and Nguyen, T. H. and P\'erez-Ramos, R.",
+title = "{Neural network predictions of inclusive electron-nucleus cross sections}",
eprint = "2305.08217",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "5",
doi = "10.1103/PhysRevC.107.065501",
journal = "Phys. Rev. C",
volume = "107",
number = "6",
pages = "065501",
year = "2023"
}

@@ -673,6 +781,16 @@ @article{Buhmann:2023bwk
year = "2023"
}

@article{Dellen:2023avd,
author = "Dellen, Babette and Jaekel, Uwe and Freitas, Paulo S. A. and Clark, John W.",
title = "{Predicting nuclear masses with product-unit networks}",
eprint = "2305.04675",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
month = "5",
year = "2023"
}

% May 8, 2023
@article{Sengupta:2023xqy,
author = "Sengupta, Debajyoti and Klein, Samuel and Raine, John Andrew and Golling, Tobias",
@@ -684,6 +802,20 @@ @article{Sengupta:2023xqy
year = "2023"
}

@article{Zhou:2023cfs,
author = "Zhou, Wenjie and Hu, Jinniu and Zhang, Ying and Shen, Hong",
title = "{Nonparametric Model for the Equations of State of a Neutron Star from Deep Neural Network}",
eprint = "2305.03323",
archivePrefix = "arXiv",
primaryClass = "nucl-th",
doi = "10.3847/1538-4357/acd335",
journal = "Astrophys. J.",
volume = "950",
number = "2",
pages = "186",
year = "2023"
}

% May 7, 2023
@article{Aguilar-Saavedra:2023pde,
author = "Aguilar-Saavedra, J. A. and Arganda, E. and Joaquim, F. R. and Sand\'a Seoane, R. M. and Seabra, J. F.",
10 changes: 5 additions & 5 deletions HEPML.tex
@@ -94,11 +94,11 @@
\\\textit{Neutrino detectors are very large in order to have a sizable rate of neutrino detection. The entire neutrino interaction can be characterized to distinguish different neutrino flavors.}
\item \textbf{Direct Dark Matter Detectors}~\cite{Ilyasov_2020,Akerib:2020aws,Khosa:2019qgp,Golovatiuk:2021lqn,McDonald:2021hus,Coarasa:2021fpv,Herrero-Garcia:2021goa,Liang:2021nsz,Li:2022tvg,Biassoni:2023lih}
\\\textit{Dark matter detectors are similar to neutrino detectors, but aim to achieve `zero' background.}
\item \textbf{Cosmology, Astro Particle, and Cosmic Ray physics}~\cite{Ostdiek:2020cqz,Brehmer:2019jyt,Tsai:2020vcx,Verma:2020gnq,Aab:2021rcn,Balazs:2021uhg,gonzalez2021tackling,Conceicao:2021xgn,huang2021convolutionalneuralnetwork,Droz:2021wnh,Han:2021kjx,Arjona:2021hmg,1853992,Shih:2021kbt,Ikeda:2021sxm,Aizpuru:2021vhd,Vago:2021grx,List:2021aer,Kahlhoefer:2021sha,Sabiu:2021aea,Mishra-Sharma:2021nhh,Mishra-Sharma:2021oxe,Bister:2021arb,Chen:2019avc,De:2022sde,Montel:2022fhv,Glauch:2022xth,Sun:2022djj,Abel:2022nje,Zhang:2022djp,Nguyen:2022ldb,Kim:2023wuk}
\item \textbf{Cosmology, Astro Particle, and Cosmic Ray physics}~\cite{Ostdiek:2020cqz,Brehmer:2019jyt,Tsai:2020vcx,Verma:2020gnq,Aab:2021rcn,Balazs:2021uhg,gonzalez2021tackling,Conceicao:2021xgn,huang2021convolutionalneuralnetwork,Droz:2021wnh,Han:2021kjx,Arjona:2021hmg,1853992,Shih:2021kbt,Ikeda:2021sxm,Aizpuru:2021vhd,Vago:2021grx,List:2021aer,Kahlhoefer:2021sha,Sabiu:2021aea,Mishra-Sharma:2021nhh,Mishra-Sharma:2021oxe,Bister:2021arb,Chen:2019avc,De:2022sde,Montel:2022fhv,Glauch:2022xth,Sun:2022djj,Abel:2022nje,Zhang:2022djp,Nguyen:2022ldb,Kim:2023wuk,Zhou:2023cfs,Carvalho:2023ele,Cai:2023gol}
\\\textit{Machine learning is often used in astrophysics and cosmology in different ways than terrestrial particle physics experiments due to a general divide between Bayesian and Frequentist statistics. However, there are many similar tasks and a growing number of proposals designed for one domain that apply to the other. See also https://github.com/georgestein/ml-in-cosmology.}
\item \textbf{Tracking}~\cite{Farrell:DLPS2017,Farrell:2018cjr,Amrouche:2019wmx,Ju:2020xty,Akar:2020jti,Shlomi:2020ufi,Choma:2020cry,Siviero:2020tim,Fox:2020hfm,Amrouche:2021tlm,goto2021development,Biscarat:2021dlj,Akar:2021gns,Thais:2021qcb,Ju:2021ayy,Dezoort:2021kfk,Edmonds:2021lzd,Lavrik:2021zgt,Huth:2021zcm,Goncharov:2021wvd,Wang:2022oer,Alonso-Monsalve:2022zlm,Bakina:2022mhs,Akram:2022zmj,Sun:2022bxx,Abidi:2022ogh,Bae:2023eec,Knipfer:2023zrv}
\\\textit{Charged particle tracking is a challenging pattern recognition task. This category is for various classification tasks associated with tracking, such as seed selection.}
\item \textbf{Heavy Ions / Nuclear Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Mallick:2022alr,Steffanic:2023cyx,Mallick:2023vgi,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Wang:2023muv,Ai:2023azx,Karmakar:2023mhy}
\item \textbf{Heavy Ions / Nuclear Physics}~\cite{Pang:2016vdc,Chien:2018dfn,Du:2020pmp,Du:2019civ,Mallick:2021wop,Nagu:2021zho,Zhao:2021yjo,Sombillo:2021ifs,Zhou:2021bvw,Apolinario:2021olp,Brown:2021upr,Du:2021pqa,Kuttan:2021npg,Huang:2021iux,Shokr:2021ouh,He:2021uko,Habashy:2021orz,Zepeda:2021tzp,Mishra:2021eqb,Ng:2021ibr,Habashy:2021qku,Biro:2021zgm,Lai:2021ckt,Du:2021qwv,Du:2021brx,Xiang:2021ssj,Soma:2022qnv,Rahman:2022tfq,Boglione:2022gpv,Liyanage:2022byj,Liu:2022hzd,Fanelli:2022kro,Chen:2022shj,Saha:2022skj,Lee:2022kdn,Biro:2022zhl,Mallick:2022alr,Steffanic:2023cyx,Mallick:2023vgi,Hirvonen:2023lqy,Biro:2023kyx,He:2023zin,Zhou:2023pti,CrispimRomao:2023ssj,Basak:2023wzq,Shi:2023xfz,Soleymaninia:2023dds,Lin:2023bmy,Dellen:2023avd,AlHammal:2023svo,Wang:2023muv,Wang:2023kcg,Ai:2023azx,Yiu:2023ido,Karmakar:2023mhy,Lasseri:2023dhi,Yoshida:2023wrb,Liu:2023xgl,Hizawa:2023plv,Wen:2023oju}
\\\textit{Many tools in high energy nuclear physics are similar to those in high energy particle physics. The physics target of these studies is to understand collective properties of the strong force.}
\end{itemize}
\item \textbf{Learning strategies}
@@ -145,7 +145,7 @@
\\\textit{Even though an experimental analysis may provide a single model-dependent interpretation of the result, the results are likely to have important implications for a variety of other models. Recasting is the task of taking a result and interpreting it in the context of a model that was not used for the original analysis.}
\item \textbf{Matrix elements}~\cite{Badger:2020uow,Bishara:2019iwh,1804325,Bury:2020ewi,Sombillo:2021yxe,Sombillo:2021rxv,Aylett-Bullock:2021hmo,Maitre:2021uaa,Danziger:2021eeg,Winterhalder:2021ngy,Karl:2022jda,Alnuqaydan:2022ncd,Dersy:2022bym,Badger:2022hwf,Janssen:2023ahv,Maitre:2023dqz}
\\\textit{Regression methods can be used as surrogate models for functions that are too slow to evaluate. One important class of functions is matrix elements, which form the core component of cross section calculations in quantum field theory.}
\item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Qiu:2023ihi,Hammal:2023njz,Shi:2023xfz}
\item \textbf{Parameter estimation}~\cite{Lei:2020ucb,1808105,Lazzarin:2020uvv,Kim:2021pcz,Alda:2021rgt,Craven:2021ems,Castro:2022zpq,Meng:2022lmd,Qiu:2023ihi,AlHammal:2023svo,Shi:2023xfz}
\\\textit{The target features could be parameters of a model, which can be learned directly through a regression setup. Other forms of inference are described in later sections (which could also be viewed as regression).}
\item \textbf{Parton Distribution Functions (and related)}~\cite{DelDebbio:2020rgv,Grigsby:2020auv,Rossi:2020sbh,Carrazza:2021hny,Ball:2021leu,Ball:2021xlu,Khalek:2021gon,Iranipour:2022iak,Gao:2022uhg,Gao:2022srd,Candido:2023utz,Wang:2023nab,Kassabov:2023hbm,Wang:2023poi,Fernando:2023obn,Rabemananjara:2023xfq}
\\\textit{Various machine learning models can provide flexible function approximators, which can be useful for modeling functions that cannot be determined easily from first principles such as parton distribution functions.}
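As a concrete illustration of the surrogate-regression idea in the matrix-element and parameter-estimation items above, the following minimal PyTorch sketch fits a small network to an expensive function and then uses it as a fast stand-in. The toy function, network sizes, and training settings are illustrative assumptions, not code from any of the cited works.

    import torch
    import torch.nn as nn

    def expensive_matrix_element(phase_space):
        # hypothetical stand-in for a slow matrix-element evaluation
        return torch.sin(phase_space).prod(dim=-1, keepdim=True)

    surrogate = nn.Sequential(nn.Linear(4, 64), nn.ReLU(),
                              nn.Linear(64, 64), nn.ReLU(),
                              nn.Linear(64, 1))
    opt = torch.optim.Adam(surrogate.parameters(), lr=1e-3)

    x_train = torch.rand(10000, 4)               # phase-space points, evaluated once offline
    y_train = expensive_matrix_element(x_train)

    for epoch in range(200):
        loss = nn.functional.mse_loss(surrogate(x_train), y_train)  # regression objective
        opt.zero_grad(); loss.backward(); opt.step()

    fast_value = surrogate(torch.rand(1, 4))     # the surrogate now replaces the slow call

The same regression setup carries over to parameter estimation or parton-distribution fits by swapping the inputs and targets; only the data change, not the structure of the fit.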
@@ -167,9 +167,9 @@
\begin{itemize}
\item \textbf{GANs}~\cite{deOliveira:2017pjk,Paganini:2017hrr,Paganini:2017dwg,Alonso-Monsalve:2018aqs,Butter:2019eyo,Martinez:2019jlu,Bellagente:2019uyp,Vallecorsa:2019ked,SHiP:2019gcl,Carrazza:2019cnt,Butter:2019cae,Lin:2019htn,DiSipio:2019imz,Hashemi:2019fkn,Chekalina:2018hxi,ATL-SOFT-PUB-2018-001,Zhou:2018ill,Carminati:2018khv,Vallecorsa:2018zco,Datta:2018mwd,Musella:2018rdi,Erdmann:2018kuh,Deja:2019vcv,Derkach:2019qfk,Erbin:2018csv,Erdmann:2018jxd,Urban:2018tqv,Oliveira:DLPS2017,deOliveira:2017rwa,Farrell:2019fsm,Hooberman:DLPS2017,Belayneh:2019vyx,Wang:2020tap,buhmann2020getting,Alanazi:2020jod,2009.03796,2008.06545,Kansal:2020svm,Maevskiy:2020ank,Lai:2020byl,Choi:2021sku,Rehm:2021zow,Rehm:2021zoz,Carrazza:2021hny,Rehm:2021qwm,Lebese:2021foi,Winterhalder:2021ave,Kansal:2021cqp,NEURIPS2020_a878dbeb,Khattak:2021ndw,Mu:2021nno,Li:2021cbp,Bravo-Prieto:2021ehz,Anderlini:2021qpm,Chisholm:2021pdn,Desai:2021wbb,Buhmann:2021caf,Bieringer:2022cbs,Ghosh:2022zdz,Anderlini:2022ckd,Ratnikov:2022hge,Rogachev:2022hjg,ATLAS:2022jhk,Anderlini:2022hgm,Buhmann:2023pmh,Yue:2023uva,Hashemi:2023ruu,Diefenbacher:2023prl,Chan:2023ume,Dubinski:2023fsy,Alghamdi:2023emm}
\\\textit{Generative Adversarial Networks~\cite{Goodfellow:2014upx} learn $p(x)$ implicitly through the minimax optimization of two networks: a generator $G(z)$ that maps noise to structure and a classifier (called the discriminator) that learns to distinguish examples produced by $G(z)$ from those generated by the target process. When the discriminator is maximally `confused', the generator is effectively mimicking $p(x)$.}
\item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int,Anzalone:2023ugq}
\item \textbf{Autoencoders}~\cite{Monk:2018zsb,ATL-SOFT-PUB-2018-001,Cheng:2020dal,1816035,Howard:2021pos,Buhmann:2021lxj,Bortolato:2021zic,deja2020endtoend,Hariri:2021clz,Fanelli:2019qaq,Collins:2021pld,Orzari:2021suh,Jawahar:2021vyu,Tsan:2021brw,Buhmann:2021caf,Touranakou:2022qrp,Ilten:2022jfm,Collins:2022qpr,AbhishekAbhishek:2022wby,Cresswell:2022tof,Roche:2023int,Anzalone:2023ugq,Lasseri:2023dhi}
\\\textit{An autoencoder consists of two functions: one that maps $x$ into a latent space $z$ (encoder) and a second one that maps the latent space back into the original space (decoder). The encoder and decoder are trained simultaneously so that their composition is nearly the identity. When the latent space has a well-defined probability density (as in variational autoencoders), one can sample from the autoencoder by applying the decoder to a randomly chosen element of the latent space.}
\item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Kach:2022qnf,Kach:2022uzq,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko,Golling:2023yjq}
\item \textbf{Normalizing flows}~\cite{Albergo:2019eim,1800956,Kanwar:2003.06413,Brehmer:2020vwc,Bothmann:2020ywa,Gao:2020zvv,Gao:2020vdv,Nachman:2020lpy,Choi:2020bnf,Lu:2020npg,Bieringer:2020tnw,Hollingsworth:2021sii,Winterhalder:2021ave,Krause:2021ilc,Hackett:2021idh,Menary:2021tjg,Hallin:2021wme,NEURIPS2020_a878dbeb,Vandegar:2020yvw,Jawahar:2021vyu,Bister:2021arb,Krause:2021wez,Butter:2021csz,Winterhalder:2021ngy,Butter:2022lkf,Verheyen:2022tov,Leigh:2022lpn,Chen:2022ytr,Albandea:2022fky,Krause:2022jna,Cresswell:2022tof,Kach:2022qnf,Kach:2022uzq,Dolan:2022ikg,Backes:2022vmn,Heimel:2022wyj,Albandea:2023wgd,Rousselot:2023pcj,Diefenbacher:2023vsw,Nicoli:2023qsl,R:2023dcr,Nachman:2023clf,Raine:2023fko,Golling:2023yjq,Wen:2023oju}
\\\textit{Normalizing flows~\cite{pmlr-v37-rezende15} learn $p(x)$ explicitly by starting with a simple probability density and then applying a series of bijective transformations with tractable Jacobians.}
\item \textbf{Diffusion Models}~\cite{Mikuni:2022xry,Leigh:2023toe,Mikuni:2023dvk,Shmakov:2023kjj,Buhmann:2023bwk,Butter:2023fov,Mikuni:2023tok,Acosta:2023zik,Imani:2023blb,Amram:2023onf}
\\\textit{These approaches learn the gradient of the log-density (the score) instead of the density directly.}
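To make the GAN entry above concrete, here is a minimal PyTorch sketch of the minimax setup: a generator G(z) maps noise to samples, a discriminator is trained to separate them from samples of the target process, and the generator is trained to fool it. The one-dimensional toy data, architectures, and hyperparameters are assumptions for illustration only.

    import torch
    import torch.nn as nn

    G = nn.Sequential(nn.Linear(8, 64), nn.ReLU(), nn.Linear(64, 1))   # noise -> sample
    D = nn.Sequential(nn.Linear(1, 64), nn.ReLU(), nn.Linear(64, 1))   # sample -> logit
    opt_g = torch.optim.Adam(G.parameters(), lr=1e-4)
    opt_d = torch.optim.Adam(D.parameters(), lr=1e-4)
    bce = nn.BCEWithLogitsLoss()

    for step in range(5000):
        real = torch.randn(128, 1) * 0.5 + 2.0          # toy stand-in for the target process
        fake = G(torch.randn(128, 8))

        # discriminator: label real as 1, generated as 0
        d_loss = bce(D(real), torch.ones(128, 1)) + bce(D(fake.detach()), torch.zeros(128, 1))
        opt_d.zero_grad(); d_loss.backward(); opt_d.step()

        # generator: make the discriminator label generated samples as real
        g_loss = bce(D(fake), torch.ones(128, 1))
        opt_g.zero_grad(); g_loss.backward(); opt_g.step()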
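A matching sketch for the autoencoder entry: an encoder and decoder are trained so that their composition is close to the identity, and in the variational case new samples come from applying the decoder to a randomly drawn latent point. The data and layer sizes are again illustrative assumptions.

    import torch
    import torch.nn as nn

    encoder = nn.Sequential(nn.Linear(10, 32), nn.ReLU(), nn.Linear(32, 2))
    decoder = nn.Sequential(nn.Linear(2, 32), nn.ReLU(), nn.Linear(32, 10))
    opt = torch.optim.Adam(list(encoder.parameters()) + list(decoder.parameters()), lr=1e-3)

    for step in range(2000):
        x = torch.randn(256, 10)                     # stand-in training data
        x_hat = decoder(encoder(x))                  # composition should be near the identity
        loss = nn.functional.mse_loss(x_hat, x)      # reconstruction objective
        opt.zero_grad(); loss.backward(); opt.step()

    # in a variational autoencoder the latent space has a known density (e.g. a standard
    # normal), so new samples come from applying the decoder to a random latent point:
    sample = decoder(torch.randn(1, 2))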
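For the normalizing-flow entry, the sketch below implements a single affine coupling transformation whose Jacobian is triangular, so its log-determinant is just the sum of the predicted log-scales; training maximizes the change-of-variables log-likelihood. The correlated toy data and network sizes are assumptions.

    import torch
    import torch.nn as nn

    net = nn.Sequential(nn.Linear(1, 64), nn.ReLU(), nn.Linear(64, 2))  # predicts log-scale, shift
    opt = torch.optim.Adam(net.parameters(), lr=1e-3)
    base = torch.distributions.Normal(0.0, 1.0)       # simple starting density

    def forward(x):
        # x: (batch, 2) -> latent z and log|det J| of the bijection
        x1, x2 = x[:, :1], x[:, 1:]
        log_s, t = net(x1).chunk(2, dim=-1)
        z2 = x2 * torch.exp(log_s) + t                # only x2 transformed, conditioned on x1
        z = torch.cat([x1, z2], dim=-1)
        return z, log_s.sum(dim=-1)                   # triangular Jacobian -> sum of log-scales

    for step in range(3000):
        x = torch.randn(256, 2) @ torch.tensor([[1.0, 0.8], [0.0, 0.6]])  # correlated toy data
        z, log_det = forward(x)
        log_prob = base.log_prob(z).sum(dim=-1) + log_det   # change of variables
        loss = -log_prob.mean()                             # maximum likelihood
        opt.zero_grad(); loss.backward(); opt.step()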
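For the diffusion-model entry, the sketch below uses denoising score matching at a single fixed noise level to train a network that approximates the gradient of the log-density of noised data (the score), the quantity these models learn; practical diffusion models use many noise levels and a sampling procedure, which are omitted here. All settings are illustrative assumptions.

    import torch
    import torch.nn as nn

    score = nn.Sequential(nn.Linear(2, 64), nn.ReLU(), nn.Linear(64, 2))
    opt = torch.optim.Adam(score.parameters(), lr=1e-3)
    sigma = 0.3                                        # one fixed noise scale, for brevity

    for step in range(3000):
        x = torch.randn(256, 2) + torch.tensor([2.0, -1.0])   # stand-in target data
        noise = torch.randn_like(x)
        x_noisy = x + sigma * noise
        target = -noise / sigma                        # score of the Gaussian perturbation
        loss = nn.functional.mse_loss(score(x_noisy), target)
        opt.zero_grad(); loss.backward(); opt.step()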