diff --git a/bib/bibliography.bib b/bib/bibliography.bib index 62fb04c66c1be0ae7e0045fdb5897f061645840f..a2ff05134fc9ce150ec7293ca28d654ba4e3280b 100644 --- a/bib/bibliography.bib +++ b/bib/bibliography.bib @@ -5,11 +5,24 @@ urldate = {2023-08-13}, abstract = {The discovery of the quantum Hall effect in 1980 marked a turning point in condensed matter physics.}, langid = {english}, - organization = {{Nature}}, + organization = {Nature}, keywords = {/unread,2D material,ARPES,Berry phase,collection,Hall effect,Hall QHE,heterostructures,Heusler,history of science,magnetic order,magnetism,popular science,quantum materials,semimetal,strongly correlated maeterials,superconductor,TMDC,topological,topological insulator,topological phase,vdW materials,Weyl semimetal}, file = {/Users/wasmer/Zotero/storage/SJH8NYEP/fdbjbijfea.html} } +@unpublished{abbottSurrogateModelsElectron2024, + title = {Surrogate Models for the Electron Density and Related Scalar Fields}, + author = {Abbott, Joseph and Chong, Raymond and Lewis, Alan M. and Ceriotti, Michele}, + date = {2024-03-21}, + url = {https://www.dpg-verhandlungen.de/year/2024/conference/berlin/part/o/session/82/contribution/3}, + urldate = {2024-03-11}, + abstract = {The electron density is a central quantity in electronic structure calculations and a fundamental property of molecules and materials, allowing access to in principle any ground state electronic property. Density-functional theory (DFT) is an ab initio method that calculates the electron density of a system by solving self-consistently the Kohn-Sham equations. However, the cubic scaling in system size makes calculations for large systems intractable. As a complementary approach, machine learning (ML) surrogate models are being developed that directly predict the self-consistent electron density at a fraction of the cost of DFT, and with more favourable linear scaling. One framework in particular focuses on learning the coefficients of basis functions fitted to the real-space density using a local equivariant ML model. An end-to-end pipeline for the training and prediction of the electron density is presented here. Built on top of a modular and scalable software stack, the 'bypassing' of the Kohn-Sham equations to access the density of larger systems becomes increasingly possible. Furthermore, the framework can be applied to scalar fields related to the electron density, opening the door to interesting applications such as the ML-driven imaging of scanning tunneling microscopy (STM).}, + eventtitle = {{{DPG SKM24}}}, + venue = {Berlin}, + keywords = {AML,charge density,DFT,DFT speedup,DFT speedup with ML,DPG,hybrid AI/simulation,library,ML,ML-Density,ML-ESM,prediction of electron density,PyTorch,scalar field,SKM24,with-code}, + file = {/Users/wasmer/Zotero/storage/GX4WMQWH/Surrogate models for the electron density and related scalar fields.html} +} + @article{acharMachineLearningElectron2023, title = {Machine {{Learning Electron Density Prediction Using Weighted Smooth Overlap}} of {{Atomic Positions}}}, author = {Achar, Siddarth K. and Bernasconi, Leonardo and Johnson, J.
Karl}, @@ -18,7 +31,7 @@ volume = {13}, number = {12}, pages = {1853}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, issn = {2079-4991}, doi = {10.3390/nano13121853}, url = {https://www.mdpi.com/2079-4991/13/12/1853}, @@ -40,7 +53,7 @@ volume = {20}, number = {8}, pages = {080201}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {1367-2630}, doi = {10.1088/1367-2630/aad1ea}, url = {https://dx.doi.org/10.1088/1367-2630/aad1ea}, @@ -51,6 +64,47 @@ file = {/Users/wasmer/Nextcloud/Zotero/AcÃn et al_2018_The quantum technologies roadmap.pdf} } +@book{aggarwalArtificialIntelligenceTextbook2021, + title = {Artificial Intelligence: A Textbook}, + shorttitle = {Artificial Intelligence}, + author = {Aggarwal, Charu C.}, + date = {2021}, + publisher = {Springer}, + location = {Cham}, + isbn = {978-3-030-72357-6 978-3-030-72356-9}, + langid = {english}, + pagetotal = {483}, + keywords = {/unread,agent,AI,artificial intelligence,educational,intelligent agent,knowledge graph,learning material,ML,multi-agent system,textbook} +} + +@book{aggarwalLinearAlgebraOptimization2020, + title = {Linear Algebra and Optimization for Machine Learning: A Textbook}, + shorttitle = {Linear Algebra and Optimization for Machine Learning}, + author = {Aggarwal, Charu C.}, + date = {2020}, + publisher = {Springer}, + location = {Cham, Switzerland}, + isbn = {978-3-030-40343-0 978-3-030-40346-1}, + langid = {english}, + pagetotal = {495}, + keywords = {/unread,educational,learning material,linear algebra,mathematics,ML,ML theory,optimization,textbook} +} + +@book{aggarwalNeuralNetworksDeep2023, + title = {Neural Networks and Deep Learning: A Textbook}, + shorttitle = {Neural Networks and Deep Learning}, + author = {Aggarwal, Charu C.}, + date = {2023}, + edition = {Second edition}, + publisher = {Springer}, + location = {Cham}, + abstract = {This textbook covers both classical and modern models in deep learning and includes examples and exercises throughout the chapters. Deep learning methods for various data domains, such as text, images, and graphs are presented in detail. The chapters of this book span three categories: The basics of neural networks: The backpropagation algorithm is discussed in Chapter 2. Many traditional machine learning models can be understood as special cases of neural networks. Chapter 3 explores the connections between traditional machine learning and neural networks. Support vector machines, linear/logistic regression, singular value decomposition, matrix factorization, and recommender systems are shown to be special cases of neural networks. Fundamentals of neural networks: A detailed discussion of training and regularization is provided in Chapters 4 and 5. Chapters 6 and 7 present radial-basis function (RBF) networks and restricted Boltzmann machines. Advanced topics in neural networks: Chapters 8, 9, and 10 discuss recurrent neural networks, convolutional neural networks, and graph neural networks. Several advanced topics like deep reinforcement learning, attention mechanisms, transformer networks, Kohonen self-organizing maps, and generative adversarial networks are introduced in Chapters 11 and 12. The textbook is written for graduate students and upper under graduate level students. Researchers and practitioners working within this related field will want to purchase this as well. 
Where possible, an application-centric view is highlighted in order to provide an understanding of the practical uses of each class of techniques. The second edition is substantially reorganized and expanded with separate chapters on backpropagation and graph neural networks. Many chapters have been significantly revised over the first edition. Greater focus is placed on modern deep learning ideas such as attention mechanisms, transformers, and pre-trained language models}, + isbn = {978-3-031-29642-0}, + langid = {english}, + keywords = {/unread}, + annotation = {OCLC: 1390406271} +} + @incollection{agostiniExactFactorizationElectron2020, title = {Exact {{Factorization}} of the {{Electron}}–{{Nuclear Wave Function}}: {{Theory}} and {{Applications}}}, shorttitle = {Exact {{Factorization}} of the {{Electron}}–{{Nuclear Wave Function}}}, @@ -58,7 +112,7 @@ author = {Agostini, Federica and Gross, E. K. U.}, date = {2020}, pages = {531--562}, - publisher = {{John Wiley \& Sons, Ltd}}, + publisher = {John Wiley \& Sons, Ltd}, doi = {10.1002/9781119417774.ch17}, url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/9781119417774.ch17}, urldate = {2023-09-21}, @@ -94,7 +148,7 @@ url = {https://github.com/aimhubio/aim}, urldate = {2021-05-13}, abstract = {Aim — a super-easy way to record, search and compare 1000s of ML training runs}, - organization = {{Aim}}, + organization = {Aim}, keywords = {experiment-tracking,keras,ML,MLOps,nlp,pytorch,reinforcement-learning,tensorflow} } @@ -108,7 +162,7 @@ volume = {154}, number = {17}, pages = {174705}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0048714}, url = {https://aip.scitation.org/doi/10.1063/5.0048714}, @@ -127,7 +181,7 @@ urldate = {2023-06-14}, abstract = {Applications of topology in condensed matter based on bulk-edge correspondence. 
Special attention to the most active research topics in topological condensed matter: theory of topological insulators and Majorana fermions, topological classification of “grand ten” symmetry classes, and topological quantum computation}, langid = {american}, - organization = {{TU Delft OCW}}, + organization = {TU Delft OCW}, keywords = {/unread,Chern insulator,course,course material,Hall effect,Hall QSHE,learning material,Majorana,MOOC,online course,physics,quantum computing,theory,topological insulator,TRS}, file = {/Users/wasmer/Zotero/storage/YHURA66E/topology-condensed-matter-concept.html} } @@ -140,8 +194,8 @@ date = {2019-07-25}, series = {{{KDD}} '19}, pages = {2623--2631}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/3292500.3330701}, url = {https://doi.org/10.1145/3292500.3330701}, urldate = {2023-11-17}, @@ -176,7 +230,7 @@ volume = {52}, number = {1}, pages = {013001}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0022-3727}, doi = {10.1088/1361-6463/aad926}, url = {https://doi.org/10.1088/1361-6463/aad926}, @@ -196,7 +250,7 @@ volume = {8}, number = {18}, pages = {eabm7185}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/sciadv.abm7185}, url = {https://www.science.org/doi/full/10.1126/sciadv.abm7185}, urldate = {2023-03-19}, @@ -233,7 +287,7 @@ volume = {55}, number = {2}, pages = {1142--1161}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.55.1142}, url = {https://link.aps.org/doi/10.1103/PhysRevB.55.1142}, urldate = {2023-07-12}, @@ -250,7 +304,7 @@ volume = {11}, number = {8}, pages = {1873}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, doi = {10.3390/nano11081873}, url = {https://www.mdpi.com/2079-4991/11/8/1873}, urldate = {2021-07-28}, @@ -323,7 +377,7 @@ volume = {6}, number = {1}, pages = {117--157}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, issn = {2078-1547}, doi = {10.3390/challe6010117}, url = {https://www.mdpi.com/2078-1547/6/1/117}, @@ -342,7 +396,7 @@ journaltitle = {WSEAS Transactions on Power Systems}, volume = {15}, pages = {50--59}, - publisher = {{WSEAS}}, + publisher = {WSEAS}, doi = {10.37394/232016.2020.15.6}, url = {https://www.wseas.com/journals/articles.php?id=1152}, urldate = {2023-08-30}, @@ -415,7 +469,7 @@ volume = {145}, number = {16}, pages = {8736--8750}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0002-7863}, doi = {10.1021/jacs.2c13467}, url = {https://doi.org/10.1021/jacs.2c13467}, @@ -433,7 +487,7 @@ urldate = {2023-05-10}, abstract = {In the last decades, immense technological and scientific progress was made thanks to the increasing available calculation power provided by the exponential growth of processor capability. However, the miniaturization of transistors is reaching the physical limits of classical processor architectures. In the future, the next big leap for scientific computing is expected to come from the realization of quantum computers.
Making more performant quantum computing platforms requires to overcome challenges of decoherence and dephasing of the qubits that form the building blocks for quantum computers. Topological protection is a viable way towards the realization of fault tolerant qubits. Materials that combine magnetism, spin-orbit interaction and conventional s-wave superconductivity are a suitable platform to study Majorana zero modes (MZM) [1], that can be used as building blocks for fault-tolerant topological qubits. In general, magnetic impurities in superconductors leads to localized Yu-Shiba-Rusinov (YSR) states at the impurity [2]. Understanding their interplay with MZMs is crucial to achieve topological quantum computers in the future. In our work, we implemented the Bogoliubov-de Gennes (BdG) formalism in the juKKR Korringa-Kohn-Rostoker Green function impurity code [3] to allow the material-specific description of defects perfectly embedded in superconductors from first principles. We apply it to an Fe impurity embedded in bulk Pb in the normal and superconducting state, then analyze the YSR states of different magnetic transition-metal adatoms placed on a superconducting Nb(110) surface where the influence of the impurity-substrate distance on the energy of the YSR states is discussed. [1] Nadj-Perge et al., Science 346, 6209 (2014). [2] L. Yu, Acta Physica Sinica 21, 75 (1965); H. Shiba, Prog. Theor. Phys. 40, 435 (1968); A. I. Rusinov, Sov. J. Exp. Theor. Phys. 29, 1101 (1969). [3] https://iffgit.fz-juelich.de/kkr/jukkr}, eventtitle = {?}, - venue = {{?}}, + venue = {?}, keywords = {/unread}, file = {/Users/wasmer/Zotero/storage/83GMAQZZ/AbstractList.html} } @@ -463,7 +517,7 @@ volume = {8}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00729-3}, url = {https://www.nature.com/articles/s41524-022-00729-3}, @@ -480,7 +534,7 @@ shorttitle = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{MuST}}}, booktitle = {Bulletin of the {{American Physical Society}}}, volume = {Volume 65, Number 1}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR20/Session/L43.3}, urldate = {2023-09-19}, eventtitle = {{{APS March Meeting}} 2020}, @@ -494,7 +548,7 @@ booktitle = {Bulletin of the {{American Physical Society}}}, date = {2020-03}, volume = {Volume 65, Number 1}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR20/Session/F40.7}, urldate = {2023-09-19}, eventtitle = {{{APS March Meeting}} 2020}, @@ -507,7 +561,7 @@ booktitle = {Bulletin of the {{American Physical Society}}}, date = {2020-03}, volume = {Volume 65, Number 1}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR20/Session/S45.9}, urldate = {2023-09-19}, eventtitle = {{{APS March Meeting}} 2020}, @@ -515,6 +569,28 @@ file = {/Users/wasmer/Zotero/storage/SJ7GXD99/S45.html} } +@article{aravaMagneticHopfionRings2023, + title = {Magnetic Hopfion Rings in New Era for Topology}, + author = {Arava, Hanu and Phatak, Charudatta M.}, + date = {2023-11}, + journaltitle = {Nature}, + volume = {623}, + number = {7988}, + pages = {702--703}, + publisher = {Nature Publishing Group}, + doi = {10.1038/d41586-023-03502-8}, + url = {https://www.nature.com/articles/d41586-023-03502-8}, + urldate = {2023-11-24}, + 
abstract = {3D spin texture observed in a chiral magnet.}, + issue = {7988}, + langid = {english}, + keywords = {/unread,condensed matter,hopfion,magnetic structure,magnetic supperlattice,PGI-1/IAS-1,skyrmions,spin texture,spin-dependent,spintronics,topological}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: News And Views\\ +Subject\_term: Condensed-matter physics, Materials science}, + file = {/Users/wasmer/Nextcloud/Zotero/Arava_Phatak_2023_Magnetic hopfion rings in new era for topology.pdf;/Users/wasmer/Zotero/storage/3F88ALZJ/d41586-023-03502-8.html} +} + @article{artrithBestPracticesMachine2021, title = {Best Practices in Machine Learning for Chemistry}, author = {Artrith, Nongnuch and Butler, Keith T. and Coudert, François-Xavier and Han, Seungwu and Isayev, Olexandr and Jain, Anubhav and Walsh, Aron}, @@ -524,7 +600,7 @@ volume = {13}, number = {6}, pages = {505--508}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1755-4349}, doi = {10.1038/s41557-021-00716-z}, url = {https://www.nature.com/articles/s41557-021-00716-z}, @@ -545,7 +621,7 @@ volume = {96}, number = {1}, pages = {014112}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.96.014112}, url = {https://link.aps.org/doi/10.1103/PhysRevB.96.014112}, urldate = {2021-10-18}, @@ -559,8 +635,8 @@ date = {2016}, series = {Lecture {{Notes}} in {{Physics}}}, volume = {919}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-319-25607-8}, url = {http://link.springer.com/10.1007/978-3-319-25607-8}, urldate = {2023-10-01}, @@ -586,7 +662,7 @@ volume = {3}, number = {12}, pages = {1023--1032}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5839}, doi = {10.1038/s42256-021-00418-8}, url = {https://www.nature.com/articles/s42256-021-00418-8}, @@ -595,7 +671,9 @@ issue = {12}, langid = {english}, keywords = {CNN,equivariant,GCN,GDL,GNN,invariance,molecules,MPNN,review,review-of-GDL}, - annotation = {Primary\_atype: Reviews Subject\_term: Cheminformatics;Computational models;Computational science Subject\_term\_id: cheminformatics;computational-models;computational-science}, + annotation = {Primary\_atype: Reviews\\ +Subject\_term: Cheminformatics;Computational models;Computational science\\ +Subject\_term\_id: cheminformatics;computational-models;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Atz et al_2021_Geometric deep learning on molecular representations.pdf;/Users/wasmer/Zotero/storage/WJWQFR9K/s42256-021-00418-8.html} } @@ -608,7 +686,7 @@ volume = {7}, number = {1}, pages = {1--7}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-4648}, doi = {10.1038/s41535-022-00455-5}, url = {https://www.nature.com/articles/s41535-022-00455-5}, @@ -620,6 +698,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Bac et al_2022_Topological response of the anomalous Hall effect in MnBi2Te4 due to magnetic.pdf;/Users/wasmer/Nextcloud/Zotero/Bac et al_2022_Topological response of the anomalous Hall effect in MnBi2Te4 due to magnetic2_supplementary.pdf;/Users/wasmer/Zotero/storage/E6I5UGGJ/s41535-022-00455-5.html} } +@article{baiMachineLearningHohenbergKohn2022, + title = {Machine Learning the {{Hohenberg-Kohn}} Map for Molecular Excited States}, + author = {Bai, Yuanming and Vogt-Maranto, Leslie and Tuckerman, Mark E. 
and Glover, William J.}, + date = {2022-11-17}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {13}, + number = {1}, + pages = {7044}, + publisher = {Nature Publishing Group}, + issn = {2041-1723}, + doi = {10.1038/s41467-022-34436-w}, + url = {https://www.nature.com/articles/s41467-022-34436-w}, + urldate = {2024-03-08}, + abstract = {The Hohenberg-Kohn theorem of density-functional theory establishes the existence of a bijection between the ground-state electron density and the external potential of a many-body system. This guarantees a one-to-one map from the electron density to all observables of interest including electronic excited-state energies. Time-Dependent Density-Functional Theory (TDDFT) provides one framework to resolve this map; however, the approximations inherent in practical TDDFT calculations, together with their computational expense, motivate finding a cheaper, more direct map for electronic excitations. Here, we show that determining density and energy functionals via machine learning allows the equations of TDDFT to be bypassed. The framework we introduce is used to perform the first excited-state molecular dynamics simulations with a machine-learned functional on malonaldehyde and correctly capture the kinetics of its excited-state intramolecular proton transfer, allowing insight into how mechanical constraints can be used to control the proton transfer reaction in this molecule. This development opens the door to using machine-learned functionals for highly efficient excited-state dynamics simulations.}, + langid = {english}, + keywords = {AIMD,AML,excited states,HK map,KRR,MD,ML,ML-DFT,ML-ESM,ML-HK map,molecules,prediction from potential,prediction of electron density,prediction of energy,TDDFT,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Bai et al_2022_Machine learning the Hohenberg-Kohn map for molecular excited states.pdf} +} + @online{bakshiLearningQuantumHamiltonians2023, title = {Learning Quantum {{Hamiltonians}} at Any Temperature in Polynomial Time}, author = {Bakshi, Ainesh and Liu, Allen and Moitra, Ankur and Tang, Ewin}, @@ -630,7 +728,7 @@ doi = {10.48550/arXiv.2310.02243}, url = {http://arxiv.org/abs/2310.02243}, urldate = {2023-10-05}, - abstract = {We study the problem of learning a local quantum Hamiltonian \$H\$ given copies of its Gibbs state \$\textbackslash rho = e\^\{-\textbackslash beta H\}/\textbackslash textrm\{tr\}(e\^\{-\textbackslash beta H\})\$ at a known inverse temperature \$\textbackslash beta{$>$}0\$. Anshu, Arunachalam, Kuwahara, and Soleimanifar (arXiv:2004.07266) gave an algorithm to learn a Hamiltonian on \$n\$ qubits to precision \$\textbackslash epsilon\$ with only polynomially many copies of the Gibbs state, but which takes exponential time. Obtaining a computationally efficient algorithm has been a major open problem [Alhambra'22 (arXiv:2204.08349)], [Anshu, Arunachalam'22 (arXiv:2204.08349)], with prior work only resolving this in the limited cases of high temperature [Haah, Kothari, Tang'21 (arXiv:2108.04842)] or commuting terms [Anshu, Arunachalam, Kuwahara, Soleimanifar'21]. We fully resolve this problem, giving a polynomial time algorithm for learning \$H\$ to precision \$\textbackslash epsilon\$ from polynomially many copies of the Gibbs state at any constant \$\textbackslash beta {$>$} 0\$. 
Our main technical contribution is a new flat polynomial approximation to the exponential function, and a translation between multi-variate scalar polynomials and nested commutators. This enables us to formulate Hamiltonian learning as a polynomial system. We then show that solving a low-degree sum-of-squares relaxation of this polynomial system suffices to accurately learn the Hamiltonian.}, + abstract = {We study the problem of learning a local quantum Hamiltonian \$H\$ given copies of its Gibbs state \$\textbackslash rho = e\textasciicircum\{-\textbackslash beta H\}/\textbackslash textrm\{tr\}(e\textasciicircum\{-\textbackslash beta H\})\$ at a known inverse temperature \$\textbackslash beta{$>$}0\$. Anshu, Arunachalam, Kuwahara, and Soleimanifar (arXiv:2004.07266) gave an algorithm to learn a Hamiltonian on \$n\$ qubits to precision \$\textbackslash epsilon\$ with only polynomially many copies of the Gibbs state, but which takes exponential time. Obtaining a computationally efficient algorithm has been a major open problem [Alhambra'22 (arXiv:2204.08349)], [Anshu, Arunachalam'22 (arXiv:2204.08349)], with prior work only resolving this in the limited cases of high temperature [Haah, Kothari, Tang'21 (arXiv:2108.04842)] or commuting terms [Anshu, Arunachalam, Kuwahara, Soleimanifar'21]. We fully resolve this problem, giving a polynomial time algorithm for learning \$H\$ to precision \$\textbackslash epsilon\$ from polynomially many copies of the Gibbs state at any constant \$\textbackslash beta {$>$} 0\$. Our main technical contribution is a new flat polynomial approximation to the exponential function, and a translation between multi-variate scalar polynomials and nested commutators. This enables us to formulate Hamiltonian learning as a polynomial system. We then show that solving a low-degree sum-of-squares relaxation of this polynomial system suffices to accurately learn the Hamiltonian.}, pubstate = {preprint}, keywords = {AML,finite-temperature,ML,ML-QM,ML-QMBP,NQS,prediction of Hamiltonian matrix}, file = {/Users/wasmer/Nextcloud/Zotero/Bakshi et al_2023_Learning quantum Hamiltonians at any temperature in polynomial time.pdf;/Users/wasmer/Zotero/storage/BGGJUKBE/2310.html} @@ -661,7 +759,7 @@ volume = {22}, number = {8}, pages = {401--409}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {2156-8952}, doi = {10.1021/acscombsci.0c00057}, url = {https://doi.org/10.1021/acscombsci.0c00057}, @@ -671,6 +769,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Banko_Ludwig_2020_Fast-Track to Research Data Management in Experimental Material Science–Setting.pdf;/Users/wasmer/Zotero/storage/7HEKG4XK/acscombsci.html} } +@online{baoDeepLearningDatabaseDensity2024, + title = {Deep-{{Learning Database}} of {{Density Functional Theory Hamiltonians}} for {{Twisted Materials}}}, + author = {Bao, Ting and Xu, Runzhang and Li, He and Gong, Xiaoxun and Tang, Zechen and Fu, Jingheng and Duan, Wenhui and Xu, Yong}, + date = {2024-04-09}, + eprint = {2404.06449}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2404.06449}, + url = {http://arxiv.org/abs/2404.06449}, + urldate = {2024-04-18}, + abstract = {Moir\textbackslash 'e-twisted materials have garnered significant research interest due to their distinctive properties and intriguing physics. However, conducting first-principles studies on such materials faces challenges, notably the formidable computational cost associated with simulating ultra-large twisted structures. 
This obstacle impedes the construction of a twisted materials database crucial for data-driven materials discovery. Here, by using high-throughput calculations and state-of-the-art neural network methods, we construct a Deep-learning Database of density functional theory (DFT) Hamiltonians for Twisted materials named DDHT. The DDHT database comprises trained neural-network models of over a hundred homo-bilayer and hetero-bilayer moir\textbackslash 'e-twisted materials. These models enable accurate prediction of the DFT Hamiltonian for these materials across arbitrary twist angles, with an averaged mean absolute error of approximately 1.0 meV or lower. The database facilitates the exploration of flat bands and correlated materials platforms within ultra-large twisted structures.}, + pubstate = {preprint}, + keywords = {/unread,\_tablet,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Nextcloud/Zotero/Bao et al_2024_Deep-Learning Database of Density Functional Theory Hamiltonians for Twisted.pdf;/Users/wasmer/Zotero/storage/JU4UIG8U/2404.html} +} + @article{barrettAutoregressiveNeuralnetworkWavefunctions2022, title = {Autoregressive Neural-Network Wavefunctions for Ab Initio Quantum Chemistry}, author = {Barrett, Thomas D. and Malyshev, Aleksei and Lvovsky, A. I.}, @@ -680,7 +794,7 @@ volume = {4}, number = {4}, pages = {351--358}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5839}, doi = {10.1038/s42256-022-00461-z}, url = {https://www.nature.com/articles/s42256-022-00461-z}, @@ -701,7 +815,7 @@ volume = {5}, number = {13}, pages = {1629--1642}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0022-3719}, doi = {10.1088/0022-3719/5/13/012}, url = {https://doi.org/10.1088/0022-3719/5/13/012}, @@ -717,8 +831,8 @@ author = {Bartók-Pártay, Albert}, date = {2010}, series = {Springer {{Theses}}}, - publisher = {{Springer Berlin Heidelberg}}, - location = {{Berlin, Heidelberg}}, + publisher = {Springer Berlin Heidelberg}, + location = {Berlin, Heidelberg}, doi = {10.1007/978-3-642-14067-9}, url = {http://link.springer.com/10.1007/978-3-642-14067-9}, urldate = {2021-07-06}, @@ -738,7 +852,7 @@ volume = {104}, number = {13}, pages = {136403}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.104.136403}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.104.136403}, urldate = {2021-07-06}, @@ -767,7 +881,7 @@ author = {Bartók, Albert P. and De, Sandip and Poelking, Carl and Bernstein, Noam and Kermode, James R.
and Csányi, Gábor and Ceriotti, Michele}, date = {2017-12}, journaltitle = {Science Advances}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/sciadv.1701816}, url = {https://www.science.org/doi/10.1126/sciadv.1701816}, urldate = {2022-10-03}, @@ -799,7 +913,7 @@ volume = {16}, number = {11}, pages = {1077--1088}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/nmat5017}, url = {https://www.nature.com/articles/nmat5017}, @@ -807,7 +921,11 @@ abstract = {The past decade has witnessed an explosion in the field of quantum materials, headlined by the predictions and discoveries of novel Landau-symmetry-broken phases in correlated electron systems, topological phases in systems with strong spin–orbit coupling, and ultra-manipulable materials platforms based on two-dimensional van der Waals crystals. Discovering pathways to experimentally realize quantum phases of matter and exert control over their properties is a central goal of modern condensed-matter physics, which holds promise for a new generation of electronic/photonic devices with currently inaccessible and likely unimaginable functionalities. In this Review, we describe emerging strategies for selectively perturbing microscopic interaction parameters, which can be used to transform materials into a desired quantum state. Particular emphasis will be placed on recent successes to tailor electronic interaction parameters through the application of intense fields, impulsive electromagnetic stimulation, and nanostructuring or interface engineering. Together these approaches outline a potential roadmap to an era of quantum phenomena on demand.}, issue = {11}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Electronic properties and materials;Phase transitions and critical phenomena Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-critical-phenomena}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Electronic properties and materials;Phase transitions and critical phenomena\\ +Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-critical-phenomena}, file = {/Users/wasmer/Nextcloud/Zotero/Basov et al_2017_Towards properties on demand in quantum materials.pdf} } @@ -827,7 +945,7 @@ abstract = {We introduce equi-tuning, a novel fine-tuning method that transforms (potentially non-equivariant) pretrained models into group equivariant models while incurring minimum L\_2 loss between the feature representations of the pretrained and the equivariant models. Large pretrained models can be equi-tuned for different groups to satisfy the needs of various downstream tasks. Equi-tuned models benefit from both group equivariance as an inductive bias and semantic priors from pretrained models. We provide applications of equi-tuning on three different tasks: image classification, compositional generalization in language, and fairness in natural language generation (NLG). We also provide a novel group-theoretic definition for fairness in NLG. The effectiveness of this definition is shown by testing it against a standard empirical method of fairness in NLG. 
We provide experimental results for equi-tuning using a variety of pretrained models: Alexnet, Resnet, VGG, and Densenet for image classification; RNNs, GRUs, and LSTMs for compositional generalization; and GPT2 for fairness in NLG. We test these models on benchmark datasets across all considered tasks to show the generality and effectiveness of the proposed method.}, issue = {6}, langid = {english}, - keywords = {\_tablet,/unread,benchmarking,equivariant,fine-tuning,group theory,image classification,inductive bias,natural language generation,nlp,pretrained models,symmetry}, + keywords = {/unread,\_tablet,benchmarking,equivariant,fine-tuning,group theory,image classification,inductive bias,natural language generation,nlp,pretrained models,symmetry}, file = {/Users/wasmer/Nextcloud/Zotero/Basu et al_2023_Equi-Tuning.pdf} } @@ -838,7 +956,7 @@ eprint = {2205.06643}, eprinttype = {arxiv}, eprintclass = {cond-mat, physics:physics, stat}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2205.06643}, url = {http://arxiv.org/abs/2205.06643}, urldate = {2022-05-21}, @@ -863,6 +981,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Batatia et al_2023_Equivariant Matrix Function Neural Networks.pdf;/Users/wasmer/Zotero/storage/2D2JTAAK/2310.html} } +@online{batatiaFoundationModelAtomistic2023, + title = {A Foundation Model for Atomistic Materials Chemistry}, + author = {Batatia, Ilyes and Benner, Philipp and Chiang, Yuan and Elena, Alin M. and Kovács, Dávid P. and Riebesell, Janosh and Advincula, Xavier R. and Asta, Mark and Baldwin, William J. and Bernstein, Noam and Bhowmik, Arghya and Blau, Samuel M. and Cărare, Vlad and Darby, James P. and De, Sandip and Della Pia, Flaviano and Deringer, Volker L. and Elijošius, Rokas and El-Machachi, Zakariya and Fako, Edvin and Ferrari, Andrea C. and Genreith-Schriever, Annalena and George, Janine and Goodall, Rhys E. A. and Grey, Clare P. and Han, Shuang and Handley, Will and Heenen, Hendrik H. and Hermansson, Kersti and Holm, Christian and Jaafar, Jad and Hofmann, Stephan and Jakob, Konstantin S. and Jung, Hyunwook and Kapil, Venkat and Kaplan, Aaron D. and Karimitari, Nima and Kroupa, Namu and Kullgren, Jolla and Kuner, Matthew C. and Kuryla, Domantas and Liepuoniute, Guoda and Margraf, Johannes T. and Magdău, Ioan-Bogdan and Michaelides, Angelos and Moore, J. Harry and Naik, Aakash A. and Niblett, Samuel P. and Norwood, Sam Walton and O'Neill, Niamh and Ortner, Christoph and Persson, Kristin A. and Reuter, Karsten and Rosen, Andrew S. and Schaaf, Lars L. and Schran, Christoph and Sivonxay, Eric and Stenczel, Tamás K. and Svahn, Viktor and Sutton, Christopher and family=Oord, given=Cas, prefix=van der, useprefix=true and Varga-Umbrich, Eszter and Vegge, Tejs and Vondrák, Martin and Wang, Yangshuai and Witt, William C. and Zills, Fabian and Csányi, Gábor}, + date = {2023-12-29}, + eprint = {2401.00096}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2401.00096}, + url = {http://arxiv.org/abs/2401.00096}, + urldate = {2024-01-20}, + abstract = {Machine-learned force fields have transformed the atomistic modelling of materials by enabling simulations of ab initio quality on unprecedented time and length scales. However, they are currently limited by: (i) the significant computational and human effort that must go into development and validation of potentials for each particular system of interest; and (ii) a general lack of transferability from one chemical system to the next.
Here, using the state-of-the-art MACE architecture we introduce a single general-purpose ML model, trained on a public database of 150k inorganic crystals, that is capable of running stable molecular dynamics on molecules and materials. We demonstrate the power of the MACE-MP-0 model -- and its qualitative and at times quantitative accuracy -- on a diverse set problems in the physical sciences, including the properties of solids, liquids, gases, and chemical reactions. The model can be applied out of the box and as a starting or "foundation model" for any atomistic system of interest and is thus a step towards democratising the revolution of ML force fields by lowering the barriers to entry.}, + pubstate = {preprint}, + keywords = {/unread,\_tablet,AML,benchmarking,foundation models,GNN,groundbreaking,MACE,MACE-MP-0,ML,MLP,MLP comparison,MPNN,original publication,PES,prediction of energy,pretrained models,SOTA,todo-tagging,universal potential,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Batatia et al_2023_A foundation model for atomistic materials chemistry.pdf;/Users/wasmer/Zotero/storage/M6QDTIVB/2401.html} +} + +@online{batatiaGeneralFrameworkEquivariant2023, + title = {A {{General Framework}} for {{Equivariant Neural Networks}} on {{Reductive Lie Groups}}}, + author = {Batatia, Ilyes and Geiger, Mario and Munoz, Jose and Smidt, Tess and Silberman, Lior and Ortner, Christoph}, + date = {2023-05-31}, + eprint = {2306.00091}, + eprinttype = {arxiv}, + eprintclass = {hep-th, stat}, + doi = {10.48550/arXiv.2306.00091}, + url = {http://arxiv.org/abs/2306.00091}, + urldate = {2023-12-18}, + abstract = {Reductive Lie Groups, such as the orthogonal groups, the Lorentz group, or the unitary groups, play essential roles across scientific fields as diverse as high energy physics, quantum mechanics, quantum chromodynamics, molecular dynamics, computer vision, and imaging. In this paper, we present a general Equivariant Neural Network architecture capable of respecting the symmetries of the finite-dimensional representations of any reductive Lie Group G. Our approach generalizes the successful ACE and MACE architectures for atomistic point clouds to any data equivariant to a reductive Lie group action. We also introduce the lie-nn software library, which provides all the necessary tools to develop and implement such general G-equivariant neural networks. It implements routines for the reduction of generic tensor products of representations into irreducible representations, making it easy to apply our architecture to a wide range of problems and groups. 
The generality and performance of our approach are demonstrated by applying it to the tasks of top quark decay tagging (Lorentz group) and shape recognition (orthogonal group).}, + pubstate = {preprint}, + keywords = {\_tablet,ACE,AML,cluster expansion,E(3),equivariant,General ML,geometric deep learning,GNN,library,Lie groups,MACE,ML,MPNN,symmetry,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Batatia et al_2023_A General Framework for Equivariant Neural Networks on Reductive Lie Groups.pdf;/Users/wasmer/Zotero/storage/6X2PAYFQ/2306.html} +} + @online{batatiaMACEHigherOrder2022, title = {{{MACE}}: {{Higher Order Equivariant Message Passing Neural Networks}} for {{Fast}} and {{Accurate Force Fields}}}, shorttitle = {{{MACE}}}, @@ -886,7 +1036,7 @@ date = {2020-11-09}, journaltitle = {Nature Reviews Materials}, pages = {1--24}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2058-8437}, doi = {10.1038/s41578-020-00255-y}, url = {https://www.nature.com/articles/s41578-020-00255-y}, @@ -906,7 +1056,7 @@ volume = {123}, number = {25}, pages = {15859--15866}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1932-7447}, doi = {10.1021/acs.jpcc.9b03925}, url = {https://doi.org/10.1021/acs.jpcc.9b03925}, @@ -944,7 +1094,7 @@ volume = {5}, number = {8}, pages = {437--438}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5820}, doi = {10.1038/s42254-023-00615-x}, url = {https://www.nature.com/articles/s42254-023-00615-x}, @@ -976,7 +1126,7 @@ author = {Bauer, David Siegfried Georg}, date = {2014}, number = {FZJ-2014-01052}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/5899}, urldate = {2022-08-12}, abstract = {This thesis is concerned with the quantum mechanical investigation of a novel class of magnetic phenomena in atomic- and nanoscale-sized systems deposited on surfaces or embedded in bulk materials that result from a competition between the exchange and the relativistic spin-orbit interactions. The thesis is motivated by the observation of novel spin-textures of one- and two-dimensional periodicity of nanoscale pitchlength exhibiting a unique winding sense observed in ultra-thin magnetic films on nonmagnetic metallic substrates with a large spin-orbit interaction. The goal is to extend this field to magnetic clusters and nano-structures of finite size in order to investigate in how far the size of the cluster and the atoms at the edge of the cluster or ribbon that are particular susceptible to relativistic effects change the balance between different interactions and thus lead to new magnetic phenomena. As an example, the challenging problem of Fe nano-islands on Ir(111) is addressed in detail as for an Fe monolayer on Ir(111) a magnetic nanoskyrmion lattice was observed as magnetic structure. To achieve this goal a new first-principles all-electron electronic structure code based on density functional theory was developed. The method of choice is the Korringa-Kohn-Rostoker (KKR) impurity Green function method, resorting on a multiple scattering approach.
This method has been conceptually further advanced to combine the neglect of any shape approximation to the full potential, with the treatment of non-collinear magnetism, of the spin-orbit interaction, as well as of the structural relaxation together with the perfect embedding of a finite size magnetic cluster of atoms into a surface or a bulk environment. For this purpose the formalism makes use of an expansion of the Green function involving explicitly left- and right-hand side scattering solutions. Relativistic effects are treated via the scalar-relativistic approximation and a spin-orbit coupling term treated self-consistently. This required the development of a new algorithm to solve the relativistic quantum mechanical scattering problem for a single atom with a non-spherical potential formulated in terms of the Lippmann-Schwinger integral equation. Prior to the investigation of the Fe nano-islands, the magnetic structure of an Fe monolayer is studied using atomistic spin-dynamics on the basis of a classical model Hamiltonian, which uses realistic coupling parameters obtained from first principles. It is shown that this method is capable to find the experimentally determined magnetic structure. [...] Bauer, David Siegfried Georg}, @@ -996,7 +1146,7 @@ volume = {157}, number = {23}, pages = {234102}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0128996}, url = {https://aip.scitation.org/doi/10.1063/5.0128996}, @@ -1015,7 +1165,7 @@ volume = {134}, number = {7}, pages = {074106}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.3553717}, url = {https://aip.scitation.org/doi/full/10.1063/1.3553717}, @@ -1050,7 +1200,7 @@ date = {2021-03-29}, journaltitle = {Chemical Reviews}, shortjournal = {Chem. Rev.}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00868}, url = {https://doi.org/10.1021/acs.chemrev.0c00868}, @@ -1083,7 +1233,7 @@ volume = {145}, number = {17}, pages = {170901}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.4966192}, url = {https://aip.scitation.org/doi/full/10.1063/1.4966192}, @@ -1122,7 +1272,7 @@ volume = {6}, number = {6}, pages = {428--442}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-3358}, doi = {10.1038/s41570-022-00391-9}, url = {https://www.nature.com/articles/s41570-022-00391-9}, @@ -1160,7 +1310,7 @@ volume = {102}, number = {23}, pages = {235130}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.102.235130}, url = {https://link.aps.org/doi/10.1103/PhysRevB.102.235130}, urldate = {2021-10-20}, @@ -1178,7 +1328,7 @@ volume = {106}, number = {12}, pages = {L121116}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.106.L121116}, url = {https://link.aps.org/doi/10.1103/PhysRevB.106.L121116}, urldate = {2022-09-28}, @@ -1187,6 +1337,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Ben Mahmoud et al_2022_Predicting hot-electron free energies from ground-state data.pdf;/Users/wasmer/Zotero/storage/6U9PWZG6/Ben Mahmoud et al.
- 2022 - Predicting hot-electron free energies from ground-.pdf;/Users/wasmer/Zotero/storage/5YSTIB2N/PhysRevB.106.html} } +@book{berciouxTopologicalMatterLectures2018, + title = {Topological {{Matter}}: {{Lectures}} from the {{Topological Matter School}} 2017}, + shorttitle = {Topological {{Matter}}}, + editor = {Bercioux, Dario and Cayssol, Jérôme and Vergniory, Maia G. and Reyes Calvo, M.}, + date = {2018}, + series = {Springer {{Series}} in {{Solid-State Sciences}}}, + volume = {190}, + publisher = {Springer International Publishing}, + location = {Cham}, + doi = {10.1007/978-3-319-76388-0}, + url = {http://link.springer.com/10.1007/978-3-319-76388-0}, + urldate = {2024-02-03}, + isbn = {978-3-319-76387-3 978-3-319-76388-0}, + langid = {english}, + keywords = {condensed matter,educational,Ferromagnetism,Hall effect,Hall QSHE,heterostructures,Majorana,non-Abelian,online book,physics,school,superconductor,textbook,topological,topological insulator,Topological Superconductor}, + file = {/Users/wasmer/Nextcloud/Zotero/Bercioux et al_2018_Topological Matter.pdf} +} + @unpublished{bernerModernMathematicsDeep2021, title = {The {{Modern Mathematics}} of {{Deep Learning}}}, author = {Berner, Julius and Grohs, Philipp and Kutyniok, Gitta and Petersen, Philipp}, @@ -1209,7 +1377,7 @@ volume = {603}, number = {7899}, pages = {41--51}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-021-04105-x}, url = {https://www.nature.com/articles/s41586-021-04105-x}, @@ -1236,7 +1404,7 @@ urldate = {2023-06-14}, abstract = {In this article, we provide an overview of the basic concepts of novel topological materials. This new class of materials developed by combining the Weyl/Dirac fermionic electron states and magnetism, provide a materials-science platform to test predictions of the laws of topological physics. Owing to their dissipationless transport, these materials hold high promises for technological applications in quantum computing and spintronics devices.}, langid = {english}, - keywords = {\_tablet,/unread,ARPES,Berry phase,breaking of TRS,Fermi arc,Hall effect,Hall QHE,semimetal,TKNN,topological insulator,TRS}, + keywords = {/unread,\_tablet,ARPES,Berry phase,breaking of TRS,Fermi arc,Hall effect,Hall QHE,semimetal,TKNN,topological insulator,TRS}, file = {/Users/wasmer/Nextcloud/Zotero/Bhardwaj_Chatterjee_2020_Topological Materials.pdf} } @@ -1249,7 +1417,7 @@ volume = {157}, number = {23}, pages = {234101}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0124363}, url = {https://aip.scitation.org/doi/10.1063/5.0124363}, @@ -1276,6 +1444,52 @@ file = {/Users/wasmer/Nextcloud/Zotero/Bigi et al_2023_Wigner kernels.pdf;/Users/wasmer/Zotero/storage/LERSCPN4/2303.html} } +@article{bihaniEGraFFBenchEvaluationEquivariant2024, + title = {{{EGraFFBench}}: Evaluation of Equivariant Graph Neural Network Force Fields for Atomistic Simulations}, + shorttitle = {{{EGraFFBench}}}, + author = {Bihani, Vaibhav and Mannan, Sajid and Pratiush, Utkarsh and Du, Tao and Chen, Zhimin and Miret, Santiago and Micoulaut, Matthieu and M.~Smedskjaer, Morten and Ranu, Sayan and Anoop~Krishnan, N. 
M.}, + date = {2024-03-04}, + journaltitle = {Digital Discovery}, + volume = {3}, + number = {4}, + pages = {759--768}, + publisher = {Royal Society of Chemistry}, + doi = {10.1039/D4DD00027G}, + url = {https://pubs.rsc.org/en/content/articlelanding/2024/dd/d4dd00027g}, + urldate = {2024-05-08}, + langid = {english}, + keywords = {Allegro,AML,benchmarking,BOTNet,Equiformer,MACE,materials,ML,MLP,MLP comparison,molecules,NequIP,TorchMDNet,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Bihani et al_2024_EGraFFBench.pdf} +} + +@book{bishopDeepLearningFoundations2024, + title = {Deep {{Learning}}: {{Foundations}} and {{Concepts}}}, + shorttitle = {Deep {{Learning}}}, + author = {Bishop, Christopher M. and Bishop, Hugh}, + date = {2024}, + publisher = {Springer International Publishing}, + location = {Cham}, + doi = {10.1007/978-3-031-45468-4}, + url = {https://www.bishopbook.com/}, + urldate = {2023-12-17}, + isbn = {978-3-031-45467-7 978-3-031-45468-4}, + langid = {english}, + keywords = {/unread,autoencoder,Deep learning,deep learning theory,educational,GAN,General ML,GNN,learning material,ML,ML theory,MPNN,neural network,online book,textbook,transformer,VAE} +} + +@book{bishopPatternRecognitionMachine2006, + title = {Pattern Recognition and Machine Learning}, + author = {Bishop, Christopher M.}, + date = {2006}, + series = {Information Science and Statistics}, + publisher = {Springer}, + location = {New York}, + url = {https://www.microsoft.com/en-us/research/uploads/prod/2006/01/Bishop-Pattern-Recognition-and-Machine-Learning-2006.pdf}, + isbn = {978-0-387-31073-2}, + pagetotal = {738}, + keywords = {/unread,educational,General ML,learning material,ML,ML theory,online book,textbook} +} + @online{bishopPlenaryFifthParadigm2022, type = {Video}, title = {Plenary: {{The}} Fifth Paradigm of Scientific Discovery}, @@ -1286,7 +1500,7 @@ urldate = {2023-01-16}, abstract = {Chris Bishop, technical fellow and director of Microsoft Research AI4Science joins colleagues and collaborators across Microsoft Research to discuss how deep learning is set to have a transformational impact on the sciences – including potential applications for drug discovery and materials design. 
Learn more about the 2022 Microsoft Research Summit […]}, langid = {american}, - organization = {{Microsoft Research Summit 2022}}, + organization = {Microsoft Research Summit 2022}, keywords = {/unread,AML,emulator,fifth paradigm,general ML,geometric deep learning,Microsoft Research,ML,surrogate model}, file = {/Users/wasmer/Zotero/storage/IJ8MX5EV/plenary-the-fifth-paradigm-of-scientific-discovery.html} } @@ -1301,6 +1515,45 @@ keywords = {/unread,AML,literature analysis,ML,popular science} } +@article{blaiszikDataEcosystemSupport2019, + title = {A Data Ecosystem to Support Machine Learning in Materials Science}, + author = {Blaiszik, Ben and Ward, Logan and Schwarting, Marcus and Gaff, Jonathon and Chard, Ryan and Pike, Daniel and Chard, Kyle and Foster, Ian}, + date = {2019-12-01}, + journaltitle = {MRS Communications}, + shortjournal = {MRS Communications}, + volume = {9}, + number = {4}, + pages = {1125--1133}, + issn = {2159-6867}, + doi = {10.1557/mrc.2019.118}, + url = {https://doi.org/10.1557/mrc.2019.118}, + urldate = {2023-12-07}, + abstract = {Facilitating the application of machine learning (ML) to materials science problems requires enhancing the data ecosystem to enable discovery and collection of data from many sources, automated dissemination of new data across the ecosystem, and the connecting of data with materials-specific ML models. Here, we present two projects, the Materials Data Facility (MDF) and the Data and Learning Hub for Science (DLHub), that address these needs. We use examples to show how MDF and DLHub capabilities can be leveraged to link data with ML models and how users can access those capabilities through web and programmatic interfaces.}, + langid = {english}, + keywords = {/unread,AML,Database,failed platform,failure,materials,materials database,materials informatics,ML,ML model database}, + file = {/Users/wasmer/Nextcloud/Zotero/Blaiszik et al_2019_A data ecosystem to support machine learning in materials science.pdf} +} + +@article{blaiszikMaterialsDataFacility2016, + title = {The {{Materials Data Facility}}: {{Data Services}} to {{Advance Materials Science Research}}}, + shorttitle = {The {{Materials Data Facility}}}, + author = {Blaiszik, B. and Chard, K. and Pruyne, J. and Ananthakrishnan, R. and Tuecke, S. and Foster, I.}, + date = {2016-08-01}, + journaltitle = {JOM}, + shortjournal = {JOM}, + volume = {68}, + number = {8}, + pages = {2045--2052}, + issn = {1543-1851}, + doi = {10.1007/s11837-016-2001-3}, + url = {https://doi.org/10.1007/s11837-016-2001-3}, + urldate = {2023-12-07}, + abstract = {With increasingly strict data management requirements from funding agencies and institutions, expanding focus on the challenges of research replicability, and growing data sizes and heterogeneity, new data needs are emerging in the materials community. The materials data facility (MDF) operates two cloud-hosted services, data publication and data discovery, with features to promote open data sharing, self-service data publication and curation, and encourage data reuse, layered with powerful data discovery tools. 
The data publication service simplifies the process of copying data to a secure storage location, assigning data a citable persistent identifier, and recording custom (e.g., material, technique, or instrument specific) and automatically-extracted metadata in a registry while the data discovery service will provide advanced search capabilities (e.g., faceting, free text range querying, and full text search) against the registered data and metadata. The MDF services empower individual researchers, research projects, and institutions to (I) publish research datasets, regardless of size, from local storage, institutional data stores, or cloud storage, without involvement of third-party publishers; (II) build, share, and enforce extensible domain-specific custom metadata schemas; (III) interact with published data and metadata via representational state transfer (REST) application program interfaces (APIs) to facilitate automation, analysis, and feedback; and (IV) access a data discovery model that allows researchers to search, interrogate, and eventually build on existing published data. We describe MDF’s design, current status, and future plans.}, + langid = {english}, + keywords = {/unread,Database,materials,materials database,materials informatics,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Blaiszik et al_2016_The Materials Data Facility.pdf} +} + @inproceedings{blanchardComputationalWorkflowAccelerated2022, title = {Computational {{Workflow}} for~{{Accelerated Molecular Design Using Quantum Chemical Simulations}} and~{{Deep Learning Models}}}, booktitle = {Accelerating {{Science}} and {{Engineering Discoveries Through Integrated Research Infrastructure}} for {{Experiment}}, {{Big Data}}, {{Modeling}} and {{Simulation}}}, @@ -1309,8 +1562,8 @@ date = {2022}, series = {Communications in {{Computer}} and {{Information Science}}}, pages = {3--19}, - publisher = {{Springer Nature Switzerland}}, - location = {{Cham}}, + publisher = {Springer Nature Switzerland}, + location = {Cham}, doi = {10.1007/978-3-031-23606-8_1}, abstract = {Efficient methods for searching the chemical space of molecular compounds are needed to automate and accelerate the design of new functional molecules such as pharmaceuticals. Given the high cost in both resources and time for experimental efforts, computational approaches play a key role in guiding the selection of promising molecules for further investigation. Here, we construct a workflow to accelerate design by combining approximate quantum chemical methods [i.e. density-functional tight-binding (DFTB)], a graph convolutional neural network (GCNN) surrogate model for chemical property prediction, and a masked language model (MLM) for molecule generation. Property data from the DFTB calculations are used to train the surrogate model; the surrogate model is used to score candidates generated by the MLM. The surrogate reduces computation time by orders of magnitude compared to the DFTB calculations, enabling an increased search of chemical space. Furthermore, the MLM generates a diverse set of chemical modifications based on pre-training from a large compound library. We utilize the workflow to search for near-infrared photoactive molecules by minimizing the predicted HOMO-LUMO gap as the target property. 
Our results show that the workflow can generate optimized molecules outside of the original training set, which suggests that iterations of the workflow could be useful for searching vast chemical spaces in a wide range of design problems.}, isbn = {978-3-031-23606-8}, @@ -1328,11 +1581,12 @@ volume = {103}, number = {10}, pages = {4129--4137}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.469597}, url = {https://aip.scitation.org/doi/10.1063/1.469597}, urldate = {2021-10-22}, + keywords = {AML,groundbreaking,HDNNP,history of AI,MD,ML,MLP,original publication}, file = {/Users/wasmer/Nextcloud/Zotero/Blank et al_1995_Neural network models of potential energy surfaces.pdf} } @@ -1342,7 +1596,7 @@ author = {Blügel, S. and Müller-Krumbhaar, H. and Spatschek, R. and Koch, E. and Gompper, G. and Winkler, R. G.}, date = {2006}, number = {PreJuSER-56047}, - institution = {{Forschungszentrum, Zentralbibliothek}}, + institution = {Forschungszentrum, Zentralbibliothek}, url = {http://hdl.handle.net/2128/2396}, urldate = {2021-12-12}, abstract = {Blügel, S.; Gompper, G.; Koch, E.; Müller-Krumbhaar, H.; Spatschek, R.; Winkler, R. G.}, @@ -1357,7 +1611,7 @@ author = {Blügel, S.}, date = {2006}, number = {PreJuSER-51316}, - institution = {{Theorie I}}, + institution = {Theorie I}, url = {https://juser.fz-juelich.de/record/51316}, urldate = {2021-12-12}, abstract = {Blügel, S.}, @@ -1374,8 +1628,8 @@ date = {2017}, series = {Schriften Des {{Forschungszentrums Jülich Reihe Schlüsseltechnologien}} / {{Key Technologies}}}, number = {139}, - publisher = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, - location = {{Jülich}}, + publisher = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, + location = {Jülich}, url = {https://juser.fz-juelich.de/record/830530}, keywords = {/unread,collinear,DFT,FZJ,IFF,IFF spring school,learning material,magnetism,non-collinear,PGI-1/IAS-1,tutorial} } @@ -1386,8 +1640,8 @@ date = {2017}, series = {Schriften Des {{Forschungszentrums Jülich}}. {{Reihe Schlüsseltechnologien}} / {{Key Technologies}}}, number = {139}, - publisher = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, - location = {{Jülich}}, + publisher = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, + location = {Jülich}, url = {http://hdl.handle.net/2128/22133}, abstract = {Condensed matter physics is currently undergoing a revolution through the introduction of concepts arising from topology that are used to characterize physical states, fields and properties from a completely different perspective. With the introduction of topology, the perspective is changed from describing complex systems in terms of local order parameters to a characterization by global quantities, which are measured nonlocally and which endow the systems with a global stability to perturbations. Prominent examples are topological insulators, skyrmions and Majorana fermions. Since topology translates into quantization, and topological order to entanglement, this ongoing revolution has impact on fields like mathematics, materials science, nanoelectronics and quantum information resulting in new device concepts enabling computations without dissipation of energy or enabling the possibility of realizing platforms for topological quantum computation, and ultimately reaching out into applications. 
Thus, these new exciting scientific developments and their applications are closely related to the grand challenges in information and communication technology and energy saving. Topology is the branch of mathematics that deals with properties of spaces that are invariant under smooth deformations. It provides newly appreciated mathematical tools in condensed matter physics that are currently revolutionizing the field of quantum matter and materials. Topology dictates that if two different Hamiltonians can be smoothly deformed into each other they give rise to many common physical properties and their states are homotopy invariant. Thus, topological invariance, which is often protected by discrete symmetries, provides some robustness that translates into the quantization of properties; such a robust quantization motivates the search and discovery of new topological matter. So far, the mainstream of modern topological condensed matter physics relies on two profoundly different scenarios: the emergence of the complex topology either in real space, as manifested e.g. in non-trivial magnetic structures or in momentum space, finding its realization in such materials as topological and Chern insulators. The latter renowned class of solids attracted considerable attention in recent years owing to its fascinating properties of spin-momentum locking, emergence of topologically protected surface/edge states governed by Dirac physics, as well as the quantization of Hall conductance and the discovery of the quantum spin Hall effect. Historically, the discovery of topological insulators gave rise to the discovery of a whole plethora of topologically non-trivial materials such as Weyl semimetals or topological superconductors, relevant in the context of the realization of Majorana fermions and topological quantum computation. [...]}, eventtitle = {Lecture {{Notes}} of the 48th {{IFF Spring School}} 2017}, @@ -1417,7 +1671,7 @@ author = {Blum, Avrim and Hopcroft, John and Kannan, Ravi}, date = {2020-01-31}, edition = {1}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, doi = {10.1017/9781108755528}, url = {https://www.cambridge.org/core/product/identifier/9781108755528/type/book}, urldate = {2021-05-04}, @@ -1430,8 +1684,8 @@ title = {Magnetism in condensed matter}, author = {Blundell, Stephen}, date = {2001}, - publisher = {{Oxford University Press}}, - location = {{Oxford; New York}}, + publisher = {Oxford University Press}, + location = {Oxford; New York}, url = {http://public.eblib.com/choice/publicfullrecord.aspx?p=4963266}, urldate = {2022-06-18}, abstract = {An understanding of the quantum mechanical nature of magnetism has led to the development of new magnetic materials which are used as permanent magnets, sensors, and information storage. Behind these practical applications lie a range of fundamental ideas, including symmetry breaking, order parameters, excitations, frustration, and reduced dimensionality. This superb new textbook presents a logical account of these ideas, starting from basic concepts in electromagnetism and quantum mechanics. It outlines the origin of magnetic moments in atoms and how these moments can be affected by their local environment inside a crystal. The different types of interactions which can be present between magnetic moments are described.
The final chapters of the book are devoted to the magnetic properties of metals, and to the complex behaviour which can occur when competing magnetic interactions are present and/or the system has a reduced dimensionality. Throughout the text, the theoretical principles are applied to real systems. There is substantial discussion of experimental techniques and current research topics. The book is copiously illustrated and contains detailed appendices which cover the fundamental principles.}, @@ -1442,33 +1696,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Blundell_2001_Magnetism in condensed matter.pdf} } +@online{bochkarevAtomicClusterExpansion2023, + title = {Atomic {{Cluster Expansion}} for Semilocal Interactions beyond Equivariant Message Passing}, + author = {Bochkarev, Anton and Lysogorskiy, Yury and Drautz, Ralf}, + date = {2023-11-27}, + eprint = {2311.16326}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2311.16326}, + url = {http://arxiv.org/abs/2311.16326}, + urldate = {2023-12-18}, + abstract = {We extend the basis functions of the Atomic Cluster Expansion to graphs. This naturally leads to a representation that enables us to describe semilocal interactions in physically and chemically transparent form. Simplifications of the graph Atomic Cluster Expansion recover the currently most accurate message-passing representations of atomic interactions. We demonstrate the accuracy and efficiency of our expansion for a number of small molecules, clusters and a general-purpose model for carbon.}, + pubstate = {preprint}, + keywords = {\_tablet,ACE,AML,carbon,clusters,descriptors,equivariant,GNN,graph ACE,ML,MLP,MPNN,semilocal interactions,smal organic molecules,symmetry}, + file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2023_Atomic Cluster Expansion for semilocal interactions beyond equivariant message.pdf;/Users/wasmer/Zotero/storage/UMGXF4LV/2311.html} +} + @article{bochkarevEfficientParametrizationAtomic2022, title = {Efficient Parametrization of the Atomic Cluster Expansion}, - author = {Bochkarev, Anton}, - date = {2022}, + author = {Bochkarev, Anton and Lysogorskiy, Yury and Menon, Sarath and Qamar, Minaam and Mrovec, Matous and Drautz, Ralf}, + date = {2022-01-24}, journaltitle = {Physical Review Materials}, - shortjournal = {Phys. Rev. Materials}, + shortjournal = {Phys. Rev. Mater.}, volume = {6}, number = {1}, + pages = {013804}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.6.013804}, - keywords = {\_tablet,ACE,descriptors,library,ML,pacemaker,with-code}, - file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev_2022_Efficient parametrization of the atomic cluster expansion.pdf;/Users/wasmer/Zotero/storage/LLPTMRGA/PhysRevMaterials.6.html} -} - -@unpublished{bochkarevMultilayerAtomicCluster2022, - title = {Multilayer Atomic Cluster Expansion for Semi-Local Interactions}, - author = {Bochkarev, Anton and Lysogorskiy, Yury and Ortner, Christoph and Csányi, Gábor and Drautz, Ralf}, - date = {2022-05-17}, - eprint = {2205.08177}, - eprinttype = {arxiv}, - eprintclass = {cond-mat}, - publisher = {{arXiv}}, - doi = {10.48550/arXiv.2205.08177}, - url = {http://arxiv.org/abs/2205.08177}, - urldate = {2022-05-21}, - abstract = {Traditionally, interatomic potentials assume local bond formation supplemented by long-range electrostatic interactions when necessary. This ignores intermediate range multi-atom interactions that arise from the relaxation of the electronic structure.
Here, we present the multilayer atomic cluster expansion (ml-ACE) that includes collective, semi-local multi-atom interactions naturally within its remit. We demonstrate that ml-ACE significantly improves fit accuracy compared to a local expansion on selected examples and provide physical intuition to understand this improvement.}, - keywords = {\_tablet,ACE,descriptors,ML,ml-ACE}, - file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2022_Multilayer atomic cluster expansion for semi-local interactions.pdf;/Users/wasmer/Zotero/storage/NQ2MH8V7/2205.html} + url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.6.013804}, + urldate = {2023-12-08}, + abstract = {The atomic cluster expansion (ACE) provides a general, local, and complete representation of atomic energies. Here we present an efficient framework for parametrization of ACE models for elements, alloys, and molecules. To this end, we first introduce general requirements for a physically meaningful description of the atomic interaction, in addition to the usual equivariance requirements. We then demonstrate that ACE can be converged systematically with respect to two fundamental characteristics—the number and complexity of basis functions and the choice of nonlinear representation. The construction of ACE parametrizations is illustrated for several representative examples with different bond chemistries, including metallic copper, covalent carbon, and several multicomponent molecular and alloy systems. We discuss the Pareto front of optimal force to energy matching contributions in the loss function, the influence of regularization, the importance of consistent and reliable reference data, and the necessity of unbiased validation. Our ACE parametrization strategy is implemented in the freely available software package pacemaker that enables largely automated and GPU accelerated training. The resulting ACE models are shown to be superior or comparable to the best currently available ML potentials and can be readily used in large-scale atomistic simulations.}, + keywords = {\_tablet,ACDC,ACE,AML,descriptors,library,ML,pacemaker,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2022_Efficient parametrization of the atomic cluster expansion.pdf;/Users/wasmer/Zotero/storage/4TU3DH2J/PhysRevMaterials.6.html} } @online{bochkarevMultilayerAtomicCluster2022a, @@ -1483,7 +1742,7 @@ urldate = {2022-09-29}, abstract = {Traditionally, interatomic potentials assume local bond formation supplemented by long-range electrostatic interactions when necessary. This ignores intermediate range multi-atom interactions that arise from the relaxation of the electronic structure. Here, we present the multilayer atomic cluster expansion (ml-ACE) that includes collective, semi-local multi-atom interactions naturally within its remit. 
We demonstrate that ml-ACE significantly improves fit accuracy compared to a local expansion on selected examples and provide physical intuition to understand this improvement.}, pubstate = {preprint}, - keywords = {ACE,descriptors,ML,MLP,multilayer-ACE,semilocal interactions}, + keywords = {ACE,descriptors,ML,ml-ACE,MLP,multilayer-ACE,semilocal interactions}, file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2022_Multilayer atomic cluster expansion for semi-local interactions2.pdf;/Users/wasmer/Zotero/storage/ZVU3IARD/2205.html} } @@ -1495,7 +1754,7 @@ volume = {14}, number = {8}, pages = {1883}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, issn = {1996-1944}, doi = {10.3390/ma14081883}, url = {https://www.mdpi.com/1996-1944/14/8/1883}, @@ -1512,7 +1771,7 @@ shorttitle = {Topological {{Deep Learning}}}, author = {Bodnar, Cristian}, date = {2023-06-20T12:04:30Z}, - institution = {{University of Cambridge}}, + institution = {University of Cambridge}, url = {https://www.repository.cam.ac.uk/handle/1810/350982}, urldate = {2023-06-22}, abstract = {The types of spaces where data resides - graphs, meshes, grids, manifolds - are becoming increasingly varied and heterogeneous. Therefore, translating ideas, models, and theoretical results between different domains is becoming more and more challenging. Nonetheless, two fundamental principles unite all these settings. The first states that data is localised, meaning that data is associated with some regions of the underlying space. The second says that data is relational, and this relational structure reflects how the various regions of the space overlap. It is natural to formalise these axioms using algebraic topology. The "space'' in question is a topological space - a set with a neighbourhood structure - and the data attached to its neighbourhoods are algebraic objects like vector spaces. Since graphs, manifolds and everything in between is a topological space, we adopt this mathematical viewpoint to smoothly transition between domains, improve our theoretical understanding of existent models and design new ones, including for spaces that are yet to be explored in Machine Learning. Guided by this perspective, this work introduces Topological Deep Learning, a research programme studying (deep) models performing inference on data glued to a topological space. This thesis includes four research works expanding upon the directions outlined above. The first work proposes Message Passing Simplicial Networks (MPSNs), a family of models operating on simplicial complexes, a higher-dimensional generalisation of graphs coming from algebraic topology. We study the symmetries these models must satisfy, the topological invariants that describe their behaviour, and how they can learn representations based on discrete differential forms. The second work takes this generalisation further to cell complexes, a class of spaces that also subsume simplicial complexes. We show their additional flexibility benefits molecular applications, where the resulting models outperform prior art on molecular property prediction tasks. The third work proposes a general topological framework for constructing graph coarsening (aka pooling) operators in a way that naturally generalises existing pooling approaches in computer vision. We show that this framework can be used to construct graph-based hierarchical models and visualise attributed graphs. 
Finally, the last work introduces a new perspective on graph models based on sheaf theory, a subfield of algebraic topology. Sheaves, which are mathematical data structures that naturally store the data attached to a topological space and its relational structure, faithfully realise the axiomatic principles of Topological Deep Learning. We show that sheaf structures on graphs are intimately connected with the asymptotic behaviour of message passing graph models and exploit these connections to design new sheaf-based convolutional architectures. We demonstrate that these models can cope with the challenges of oversmoothing and heterophilic graphs, which affect many existent graph models. Overall, this thesis introduces a novel topological perspective on deep learning for structured data, whose ramifications establish many new connections with algebraic topology.}, @@ -1546,7 +1805,7 @@ volume = {11}, number = {1}, pages = {5223}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-19093-1}, url = {https://www.nature.com/articles/s41467-020-19093-1}, @@ -1555,7 +1814,12 @@ issue = {1}, langid = {english}, keywords = {\_tablet,2-step model,AML,CC,CCSD(T),coupled cluster,Delta,delta learning,DFT,HK map,KKR,ML,ML-DFA,ML-DFT,ML-ESM,ML-HK map,molecules,multi-step model,prediction of electron density,with-code,Δ-machine learning}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational chemistry;Computational science Subject\_term\_id: computational-chemistry;computational-science}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Computational chemistry;Computational science\\ +Subject\_term\_id: computational-chemistry;computational-science}, file = {/home/johannes/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Bogojeski et al_2020_Quantum chemical accuracy from density functional approximations via machine.pdf} } @@ -1615,7 +1879,7 @@ author = {Bornemann, Marcel}, date = {2019}, number = {FZJ-2019-02271}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/22015}, urldate = {2022-08-12}, abstract = {The large-scale Density Functional Theory (DFT) code KKRnano allows one to perform \$\textbackslash textit\{ab initio\}\$ simulations for thousands of atoms. In this thesis an extension of KKRnano is presented and utilized which facilitates the investigation of exotic non-collinear magnetic textures in bulk materials on huge length scales. Such an undertaking inevitably involves the utilization of High Performance Computing (HPC) which is itself a scientific field. The work in this context includes the adaptation of new coding paradigms and the optimization of codes on constantly changing hardware architectures. In KKRnano, the runtime of a simulation scales linearly with the number of atoms due to an advanced Korringa-Kohn-Rostoker (KKR) scheme that is applied, in which the sparsity of the matrices in the multiple-scattering equations is exploited.
This enables us to investigate phenomena that occur on a length scale of nanometers involving thousands of atoms. The main purpose of this thesis was to generalize the KKR formalism in KKRnano in such a way that a non-collinear alignment of the atomic spins can be treated. In addition to this, the relativistic coupling of spin and orbital degrees of freedom, which arises from the Dirac equation, was introduced to the code. This coupling gives rise to the Dzyaloshinskii-Moriya interaction (DMI) from which the formation of non-collinear magnetic textures usually originates. Other methodological features that were added to KKRnano or were re-established in the context of this thesis are the Generalized Gradient Approximation (GGA), Lloyd’s formula and a semi-core energy contour integration. GGA is known to be a better approximation to the exchange-correlation energy in DFT than the still very popular Local Density Approximation (LDA), Lloyd’s formula allows to determine the charge density exactly, despite the angular momentum expansion of all quantities, and the semi-core energy contour integration facilitates the treatment of high-lying electronic core states. Furthermore, an experimental port of the multiple-scattering solver routine to Graphics Processing Unit (GPU) architectures is discussed and the large-scale capabilities of KKRnano are demonstrated by benchmark calculations on the supercomputer JUQUEEN that include more than 200,000 atoms. The new version of KKRnano is used to investigate the magnetic B20 compounds B20-MnGe and B20-FeGe as well as alloys of B20-Mn\$\_\{1−x\}\$Fe\$\_\{x\}\$Ge type with varied concentration of Mn and Ge. These compounds are well-known for exhibiting helical states. Recently reported observations of topologically protected magnetic particles, also known as skyrmions, make them promising candidates for future spintronic devices. Initially, the known pressure-induced transition from a high-spin to a low-spin state in B20-MnGe is reproduced with KKRnano and an examination of the magnetocrystalline anisotropy yields unexpected results. [...] Bornemann, Marcel}, @@ -1661,7 +1925,7 @@ author = {Bouaziz, Juba}, date = {2019}, number = {FZJ-2019-05254}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/23183}, urldate = {2022-08-12}, abstract = {This thesis provides a theoretical description of magnetic nanostructures in inversion-asymmetric environments with strong spin-orbit interaction (SOI). The theoretical concepts introduced here can be applied in the field of spin-orbitronics, which consists of exploiting the SOI to manipulate the electron spin without external magnetic fields. The investigated systems display a plethora of interesting phenomena ranging from chiral magnetic interactions to gapped magnetic excitations. In practice, we adopt two different approaches: First, a model-based one relying on the Rashba Hamiltonian, which is employed to demystify and understand magnetic and transport properties of magnetic nanostructures embedded in a Rashba electron gas. Second, we use a first-principles approach within the framework of the Korringa-Kohn-Rostoker (KKR) Green function method to investigate the ground state properties of magnetic impurities in topologically insulating hosts. This method is suitable to simulate nanostructures in real space.
Then, we employed our newly developed code based on time-dependent density functional theory to compute the spin excitation spectra of these magnetic nanostructures embedded in topological insulators. Moreover, the KKR Green function method was used to simulate the electronic structure and ground state properties of large magnetic nanostructures, namely magnetic Skyrmions. In the first part, the analytical Rashba Green function and the scattering matrices modeling the magnetic impurities in the s-wave approximation are employed for the computation of the magnetic interaction tensor which contains: isotropic exchange, Dzyaloshinskii-Moriya (DM) and pseudo-dipolar interactions. The competition between these interactions leads to a rich phase diagram depending on the distance between the magnetic impurities. Next, we consider an external perturbing electric field and investigate the transport properties by computing the residual resistivity tensor within linear response theory. The contribution of SOI is explored. The investigation of arbitrary orientations of the impurity magnetic moment allowed a detailed analysis of contributions from the anisotropic magnetoresistance and planar Hall effect. Moreover, we calculate the impurity induced bound currents in the Rashba electron gas, which are used to compute the induced orbital magnetization. For a trimer of impurities with a non-vanishing spin chirality (SC) a finite orbital magnetization is observed when SOI is turned off. Since it emerges from the SC, it was named chiral orbital magnetization. [...] Bouaziz, Juba}, @@ -1671,6 +1935,18 @@ file = {/Users/wasmer/Nextcloud/Zotero/Bouaziz_2019_Spin-orbitronics at the nanoscale.pdf;/Users/wasmer/Zotero/storage/YM28TKHA/865993.html} } +@book{boydConvexOptimization2004, + title = {Convex Optimization}, + author = {Boyd, Stephen P. and Vandenberghe, Lieven}, + date = {2004}, + publisher = {Cambridge University Press}, + location = {Cambridge, UK ; New York}, + url = {https://stanford.edu/~boyd/cvxbook/}, + isbn = {978-0-521-83378-3}, + pagetotal = {716}, + keywords = {/unread,convex optimization,educational,General ML,learning material,mathematics,ML,ML theory,numerical analysis,online book,optimization,textbook} +} + @article{brackTenSimpleRules2022, title = {Ten Simple Rules for Making a Software Tool Workflow-Ready}, author = {Brack, Paul and Crowther, Peter and Soiland-Reyes, Stian and Owen, Stuart and Lowe, Douglas and Williams, Alan R. 
and Groom, Quentin and Dillen, Mathias and Coppens, Frederik and Grüning, Björn and Eguinoa, Ignacio and Ewels, Philip and Goble, Carole}, @@ -1680,7 +1956,7 @@ volume = {18}, number = {3}, pages = {e1009823}, - publisher = {{Public Library of Science}}, + publisher = {Public Library of Science}, issn = {1553-7358}, doi = {10.1371/journal.pcbi.1009823}, url = {https://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1009823}, @@ -1714,7 +1990,7 @@ eprint = {2202.07643}, eprinttype = {arxiv}, eprintclass = {cs}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2202.07643}, url = {http://arxiv.org/abs/2202.07643}, urldate = {2022-06-09}, @@ -1732,7 +2008,7 @@ volume = {20}, number = {1}, pages = {8}, - publisher = {{Ubiquity Press}}, + publisher = {Ubiquity Press}, issn = {1683-1470}, doi = {10.5334/dsj-2021-008}, url = {http://datascience.codata.org/articles/10.5334/dsj-2021-008/}, @@ -1788,7 +2064,7 @@ volume = {33}, number = {40}, pages = {404002}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0953-8984}, doi = {10.1088/1361-648X/ac1280}, url = {https://dx.doi.org/10.1088/1361-648X/ac1280}, @@ -1808,7 +2084,7 @@ volume = {8}, number = {1}, pages = {872}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-017-00839-3}, url = {https://www.nature.com/articles/s41467-017-00839-3}, @@ -1820,26 +2096,46 @@ file = {/Users/wasmer/Nextcloud/Zotero/Brockherde et al_2017_Bypassing the Kohn-Sham equations with machine learning.pdf;/Users/wasmer/Zotero/storage/8X4ALINZ/s41467-017-00839-3.html} } -@book{broderHighthroughputAllElectronDensity2021, - title = {High-Throughput {{All-Electron Density Functional Theory Simulations}} for a {{Data-driven Chemical Interpretation}} of {{X-ray Photoelectron Spectra}}}, - author = {Bröder, Jens}, +@thesis{broderHighthroughputAllelectronDensity2021, + title = {High-Throughput All-Electron Density Functional Theory Simulations for a Data-Driven Chemical Interpretation of {{X-ray}} Photoelectron Spectra}, + author = {Bröder, Jens and Mazzarello, Riccardo and Linsmeier, Christian and Blügel, Stefan}, date = {2021}, - series = {Schriften Des {{Forschungszentrums Jülich}}. {{Reihe Schlüsseltechnologien}} / {{Key Technologies}}}, - number = {229}, - publisher = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, - location = {{Jülich}}, - abstract = {Enabling computer-driven materials design to find and create materials with advanced propertiesfromthe enormous haystack of material phase space is a worthy goal for humanity. Most high-technologies, for example in the energy or health sector, strongly depend on advanced tailored materials. Since conventional research and screening of materials is rather slow and expensive, being able to determine material properties on the computer poses a paradigm shift. For the calculation of properties for pure materials on the nano scale ab initio methods based on the theory of quantum mechanics are well established. Density Functional Theory(DFT) is such a widely applied method from first principles with high predictive power. To screen through larger sets of atomic configurations physical property calculation processes need to be robust and automated. Automation is achieved through the deployment of advanced frameworks which manage many workflows while tracking the provenance of data and calculations. 
Through workflows, which are essential property calculator procedures, a high-level automation environment is achievable and accumulated knowledge can be reused by others. Workflows can be complex and include multiple programs solving problems over several physical length scales. In this work, the open source all-electron DFT program FLEUR implementing the highly accurate Full-potential Linearized Augmented Plane Wave (FLAPW) method is connected and deployed through the open source Automated Interactive Infrastructure and Database for Computational Science (AiiDA) framework to achieve automation. AiiDA is a Python framework which is capable of provenance tracking millions of high-through put simulations and their data. Basic and advanced workflows are implemented in an open source Pythonpackage AiiDA-FLEUR, especially to calculate properties for the chemical analysis of X-rayphotoemission spectra. These workflows are applied on a wide range of materials, in particular on most known metallic binary compounds. The chemical-phase composition and other material properties of a surface region can be understood through the careful chemical analysis of high-resolution X-ray photoemission spectra. The spectra evaluation process is improved through the development of a fittingmethod driven by data from ab initio simulations. For complex multi-phase spectra this proposedevaluation process is expected to have advantages over the widely applied conventional methods. The spectra evaluation process is successfully deployed on well-behaved spectra of materials relevant for the inner wall (blanket and divertor) plasma-facing components of a nuclear fusion reactor. In particular, the binary beryllium systems Be-Ti, Be-Wand Be-Ta are investigated. Furthermore, different approaches to calculate spectral properties like chemical shifts and binding energies are studied and benchmarked against the experimental literature and data from the NIST X-ray photoelectron spectroscopy database}, - isbn = {978-3-95806-526-0}, + number = {RWTH-2021-03326}, + institution = {Forschungszentrum Jülich GmbH, Zentralbibliothek, Verlag}, + url = {https://publications.rwth-aachen.de/record/816617}, + urldate = {2024-05-08}, + abstract = {Viele Hochtechnologien, wie die Kernfusion sind stark auf maßgeschneiderte hochspezialisierte Materialien angewiesen. Die Ermöglichung von computergestüzter Materialentwicklung ist somit ein lohnenswertes Ziel der Menschheit, um aus dem riesigen Heuhaufen des Materialphasenraumes High-tech Materialien mit gewollten Eigenschaften zu designen. Für reine Materialien auf kleinen Lägenskalen sind etablierte ab initio Methoden, welche auf der Theorie der Quantenmechanik basieren, wie die Dichtefunktionaltheorie (DFT) der Stand der Technik, um Materialeigenschaften mit Hilfe des Computers zu bestimmen, bevor diese Materialien im Labor langsam und kostenintensiv überprüft werden. Für computergestützte Materialentwicklung müssen Prozesse zur Berechnung von physikalischen Eigenschaften robust und automatisiert werden, um Berechnungen an größeren Mengen von Kristallstrukturkonfigurationen durchführen zu können. Die Automatisierung wird durch den Einsatz hochentwickelter Frameworks erreicht, welche die Herkunft von Daten und Berechnungen verfolgen und verwalten. Durch sogenannte Workflows, welche Protokolle zur physikalischen Eigenschaftsberechnung darstellen, wird ein hohes Maß an Automatisierung erreicht und Expertenwissen kann in diesen konserviert und von anderen wiederverwendet werden. 
In dieser Arbeit wurde das Open-Source DFT-Programm FLEUR für die anstehenden Aufgaben ausgewählt, welches alle Elektronen mithilfe der leistungsfähigen, hochpräzisen Linearized Augmentierte Plane Wave (FLAPW) behandelt. Der FLEUR-Program wird an das Open-Source Automated Interactive Infrastructure und Datenbank für Computational Science (AiiDA) Framework angebunden, um eine hohe Automatisierung mit FLEUR erreichen zu können. AiiDA ist ein Python-Framework, das Millionen an Hochdurchsatzsimulationen und ihre Daten in einer Datenbank nachverfolgen und verwalten kann. Fundamentale und fortgeschrittene Workflows wurden in einem Open-Source Python-Paket (AiiDA-FLEUR) implementiert, um insbesondere Eigenschaften für die chemische Analyse von Röntgenphotoemissionsspektren zu berechnen. Diese Workflows wurden auf eine Vielzahl von Materialien angewendet, insbesondere auf die meisten bekannten, metallischen, binären Verbindungen. Die genaue Phasenzusammensetzung und andere Eigenschaften eines oberflächennahen Materials können durch die sorgfältige chemische Analyse von hochauflösenden Röntgenphotoemissionsspektren verstanden werden. In dieser Arbeit wird der Spektrenauswertungsprozess basierend auf ab initio Ergebnissen durch die Entwicklung einer Anpassungsmethode für vorerst einfache, Mehrphasenspektren verbessert. Dieses XPS-Auswertungsverfahren mit ab initio Daten wurde erfolgreich auf Spektren von Materialien angewendet, die für die Wandkomponenten eines Kernfusionsreaktors relevant sind, insbesondere für die Berylliumverbindungen (Be-Ti, Be-W, Be-Ta). Weitere Ansätze zur Berechnung der Spektren-Eigenschaften wie chemische Verschiebungen und Bindungsenergien wurden untersucht und mit der experimentellen Literatur, insbesondere der NIST Datenbank für Röntgen-Photoelektronenspektroskopie verglichen. Enabling computer-driven materials design to find and create materials with advanced properties from the enormous haystack of material phase space is a worthy goal for humanity. Most high-technologies, for example in the energy or health sector, strongly depend on advanced tailored materials. Since conventional research and screening of materials is rather slow and expensive, being able to determine material properties on the computer poses a paradigm shift. For the calculation of properties for pure materials on the nano scale ab initio methods based on the theory of quantum mechanics are well established. Density Functional Theory (DFT) is such a widely applied method from first principles with high predictive power. To screen through larger sets of atomic configurations physical property calculation processes need to be robust and automated. Automation is achieved through the deployment of advanced frameworks which manage many workflows while tracking the provenance of data and calculations. Through workflows, which are essential property calculator procedures, a high-level automation environment is achievable and accumulated knowledge can be reused by others. Workflows can be complex and include multiple programs solving problems over several physical length scales. In this work, the open source all-electron DFT program FLEUR implementing the highly accurate Full-potential Linearized Augmented Plane Wave (FLAPW) method is connected and deployed through the open source Automated Interactive Infrastructure and Database for Computational Science (AiiDA) framework to achieve automation. 
AiiDA is a Python framework which is capable of provenance tracking millions of high-throughput simulations and their data. Basic and advanced workflows are implemented in an open source Python package AiiDA-FLEUR, especially to calculate properties for the chemical analysis of X-ray photoemission spectra. These workflows are applied on a wide range of materials, in particular on most known metallic binary compounds. The chemical-phase composition and other material properties of a surface region can be understood through the careful chemical analysis of high-resolution X-ray photoemission spectra. The spectra evaluation process is improved through the development of a fitting method driven by data from ab initio simulations. For complex multi-phase spectra this proposed evaluation process is expected to have advantages over the widely applied conventional methods. The spectra evaluation process is successfully deployed on well-behaved spectra of materials relevant for the inner wall (blanket and divertor) plasma-facing components of a nuclear fusion reactor. In particular, the binary beryllium systems Be-Ti, Be-W and Be-Ta are investigated. Furthermore, different approaches to calculate spectral properties like chemical shifts and binding energies are studied and benchmarked against the experimental literature and data from the NIST X-ray photoelectron spectroscopy database. Bröder, Jens; Blügel, Stefan; Mazzarello, Riccardo; Linsmeier, Christian}, + langid = {english}, + keywords = {AiiDA,AiiDA-FLEUR,DFT,FLEUR,high-throughput computing,PGI-1/IAS-1,thesis}, + file = {/Users/wasmer/Nextcloud/Zotero/Bröder et al_2021_High-throughput all-electron density functional theory simulations for a.pdf;/Users/wasmer/Zotero/storage/Y3KDN6CL/816617.html} +} + +@article{broeckerMachineLearningQuantum2017, + title = {Machine Learning Quantum Phases of Matter beyond the Fermion Sign Problem}, + author = {Broecker, Peter and Carrasquilla, Juan and Melko, Roger G. and Trebst, Simon}, + date = {2017-08-18}, + journaltitle = {Scientific Reports}, + shortjournal = {Sci Rep}, + volume = {7}, + number = {1}, + pages = {8823}, + publisher = {Nature Publishing Group}, + issn = {2045-2322}, + doi = {10.1038/s41598-017-09098-0}, + url = {https://www.nature.com/articles/s41598-017-09098-0}, + urldate = {2024-02-28}, + abstract = {State-of-the-art machine learning techniques promise to become a powerful tool in statistical mechanics via their capacity to distinguish different phases of matter in an automated way. Here we demonstrate that convolutional neural networks (CNN) can be optimized for quantum many-fermion systems such that they correctly identify and locate quantum phase transitions in such systems. Using auxiliary-field quantum Monte Carlo (QMC) simulations to sample the many-fermion system, we show that the Green’s function holds sufficient information to allow for the distinction of different fermionic phases via a CNN. We demonstrate that this QMC + machine learning approach works even for systems exhibiting a severe fermion sign problem where conventional approaches to extract information from the Green’s function, e.g. 
in the form of equal-time correlation functions, fail.}, + issue = {1}, langid = {english}, - pagetotal = {169}, - keywords = {/unread,AiiDA,AiiDA-FLEUR,DFT,FLEUR,high-throughput computing,PGI-1/IAS-1,thesis} + keywords = {/unread,ML,phase transition,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Broecker et al_2017_Machine learning quantum phases of matter beyond the fermion sign problem.pdf} } @report{broholmBasicResearchNeeds2016, title = {Basic {{Research Needs Workshop}} on {{Quantum Materials}} for {{Energy Relevant Technology}}}, author = {Broholm, Collin and Fisher, Ian and Moore, Joel and Murnane, Margaret and Moreo, Adriana and Tranquada, John and Basov, Dimitri and Freericks, Jim and Aronson, Meigan and MacDonald, Allan and Fradkin, Eduardo and Yacoby, Amir and Samarth, Nitin and Stemmer, Susanne and Horton, Linda and Horwitz, Jim and Davenport, Jim and Graf, Matthias and Krause, Jeff and Pechan, Mick and Perry, Kelly and Rhyne, Jim and Schwartz, Andy and Thiyagarajan, Thiyaga and Yarris, Lynn and Runkles, Katie}, date = {2016-02-10}, - institution = {{USDOE Office of Science (SC) (United States)}}, + institution = {USDOE Office of Science (SC) (United States)}, doi = {10.2172/1616509}, url = {https://www.osti.gov/biblio/1616509}, urldate = {2023-06-28}, @@ -1873,7 +2169,7 @@ volume = {466}, number = {7304}, pages = {310--311}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/466310a}, url = {https://www.nature.com/articles/466310a}, @@ -1885,6 +2181,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Brumfiel_2010_Topological insulators.pdf;/Users/wasmer/Zotero/storage/3FD5BH4T/466310a.html} } +@article{bubeckConvexOptimizationAlgorithms2015, + title = {Convex {{Optimization}}: {{Algorithms}} and {{Complexity}}}, + shorttitle = {Convex {{Optimization}}}, + author = {Bubeck, Sébastien}, + date = {2015}, + journaltitle = {Foundations and Trends® in Machine Learning}, + shortjournal = {FNT in Machine Learning}, + volume = {8}, + number = {3-4}, + pages = {231--357}, + issn = {1935-8237, 1935-8245}, + doi = {10.1561/2200000050}, + url = {http://www.nowpublishers.com/article/Details/MAL-050}, + urldate = {2023-11-24}, + langid = {english}, + keywords = {/unread,convex optimization,educational,General ML,learning material,mathematics,ML,ML theory,numerical analysis,online book,optimization,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Bubeck_2015_Convex Optimization.pdf} +} + @article{burkeDFTNutshell2013, title = {{{DFT}} in a Nutshell}, author = {Burke, Kieron and Wagner, Lucas O.}, @@ -1929,7 +2244,7 @@ volume = {3}, number = {1}, pages = {015012}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac3eb3}, url = {https://dx.doi.org/10.1088/2632-2153/ac3eb3}, @@ -1948,7 +2263,7 @@ volume = {559}, number = {7715}, pages = {547--555}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-018-0337-2}, url = {https://www.nature.com/articles/s41586-018-0337-2}, @@ -1970,7 +2285,7 @@ volume = {18}, number = {4}, pages = {2180--2192}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.1c00904}, url = {https://doi.org/10.1021/acs.jctc.1c00904}, @@ -1996,6 +2311,25 @@ file = {/Users/wasmer/Zotero/storage/FUN9D5UI/Bystrom and Kozinsky - 2023 - Nonlocal Machine-Learned Exchange Functional for 
M.pdf;/Users/wasmer/Zotero/storage/9YGJRHDG/2303.html} } +@article{caiSelfadaptiveFirstprinciplesApproach2023, + title = {A Self-Adaptive First-Principles Approach for Magnetic Excited States}, + author = {Cai, Zefeng and Wang, Ke and Xu, Yong and Wei, Su-Huai and Xu, Ben}, + date = {2023-12-04}, + journaltitle = {Quantum Frontiers}, + shortjournal = {Quantum Front}, + volume = {2}, + number = {1}, + pages = {21}, + issn = {2731-6106}, + doi = {10.1007/s44214-023-00041-1}, + url = {https://doi.org/10.1007/s44214-023-00041-1}, + urldate = {2024-05-08}, + abstract = {The profound impact of excited magnetic states on the intricate interplay between electron and lattice behaviors in magnetic materials is a topic of great interest. Unfortunately, despite the significant strides that have been made in first-principles methods, accurately tracking these phenomena remains a challenging and elusive task. The crux of the challenge that lies before us is centered on the intricate task of characterizing the magnetic configuration of an excited state, utilizing a first-principle approach that is firmly rooted in the ground state of the system. We propose a versatile self-adaptive spin-constrained density functional theory formalism. By iteratively optimizing the constraining field alongside the electron wave function during energy minimization, we are able to obtain an accurate potential energy surface that captures the longitudinal and transverse variations of magnetization in itinerant ferromagnetic Fe. Moreover, this technique allows us to identify the subtle coupling between magnetic moments and other degrees of freedom by tracking energy variation, providing new insights into the intricate interplay between magnetic interactions, electronic band structure, and phonon dispersion curves in single-layered \$\textbackslash mathrm\{CrI\} \_\{3\}\$. This new methodology represents a significant breakthrough in our ability to probe the complex and multifaceted properties of magnetic systems.}, + langid = {english}, + keywords = {/unread,2D material,constrained DFT,DFT,Ferromagnetism,Heisenberg model,itinerant magnetism,Jij,Landau-Lifshits-Gilbert equation,magnetism,spin dynamics,spin-dependent,VASP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Cai et al_2023_A self-adaptive first-principles approach for magnetic excited states.pdf} +} + @article{calderonAFLOWStandardHighthroughput2015, title = {The {{AFLOW}} Standard for High-Throughput Materials Science Calculations}, author = {Calderon, Camilo E. and Plata, Jose J. and Toher, Cormac and Oses, Corey and Levy, Ohad and Fornari, Marco and Natan, Amir and Mehl, Michael J. 
and Hart, Gus and Buongiorno Nardelli, Marco and Curtarolo, Stefano}, @@ -2018,7 +2352,7 @@ title = {Characterization of Defects, Modulations and Surface Layers in Topological Insulators and Structurally Related Compounds}, author = {Callaert, Carolien}, date = {2020}, - institution = {{University of Antwerpen}}, + institution = {University of Antwerpen}, url = {https://repository.uantwerpen.be/desktop/irua}, urldate = {2023-06-14}, keywords = {magnetic doping,thesis,topological insulator}, @@ -2046,8 +2380,8 @@ shorttitle = {Density {{Functional Theory}}}, editor = {Cancès, Eric and Friesecke, Gero}, date = {2023}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-031-22340-2}, url = {https://link.springer.com/10.1007/978-3-031-22340-2}, urldate = {2023-09-24}, @@ -2086,7 +2420,7 @@ volume = {88}, number = {6}, pages = {062505}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevA.88.062505}, url = {https://link.aps.org/doi/10.1103/PhysRevA.88.062505}, urldate = {2022-07-08}, @@ -2105,7 +2439,7 @@ volume = {4}, number = {3}, pages = {034204}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.4.034204}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.4.034204}, urldate = {2021-05-21}, @@ -2123,7 +2457,7 @@ volume = {7}, number = {2}, pages = {69--70}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-3358}, doi = {10.1038/s41570-022-00454-x}, url = {https://www.nature.com/articles/s41570-022-00454-x}, @@ -2194,7 +2528,7 @@ volume = {91}, number = {4}, doi = {10.1103/RevModPhys.91.045002}, - keywords = {Many-body theory,ML,review,science}, + keywords = {AML,Many-body theory,ML,ML-DFT,ML-QMBP,Neuromorphic,particle physics,prediction of electron density,QML,quantum computing,review,review-of-AML,science,unconventional computing}, file = {/Users/wasmer/Nextcloud/Zotero/Carleo_2019_Machine learning and the physical sciences.pdf;/Users/wasmer/Zotero/storage/9YE6JEBD/RevModPhys.91.html} } @@ -2206,11 +2540,11 @@ volume = {355}, number = {6325}, pages = {602--606}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.aag2302}, url = {https://www.science.org/doi/10.1126/science.aag2302}, urldate = {2022-03-29}, - keywords = {ML,ML-QM,NN,rec-by-bluegel}, + keywords = {AML,ML,ML-QMBP,NN,rec-by-bluegel}, file = {/Users/wasmer/Nextcloud/Zotero/Carleo_Troyer_2017_Solving the quantum many-body problem with artificial neural networks.pdf} } @@ -2223,7 +2557,7 @@ volume = {100}, number = {2}, pages = {024112}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.100.024112}, url = {https://link.aps.org/doi/10.1103/PhysRevB.100.024112}, urldate = {2021-05-13}, @@ -2232,12 +2566,69 @@ file = {/Users/wasmer/Nextcloud/Zotero/Caro_2019_Optimizing many-body atomic descriptors for enhanced computational performance.pdf;/Users/wasmer/Zotero/storage/FDHHHJTR/PhysRevB.100.html} } +@article{carrasquillaHowUseNeural2021, + title = {How {{To Use Neural Networks To Investigate Quantum Many-Body Physics}}}, + author = {Carrasquilla, Juan and Torlai, Giacomo}, + date = {2021-11-12}, + journaltitle = {PRX Quantum}, + shortjournal = {PRX Quantum}, + volume = 
{2}, + number = {4}, + pages = {040201}, + publisher = {American Physical Society}, + doi = {10.1103/PRXQuantum.2.040201}, + url = {https://link.aps.org/doi/10.1103/PRXQuantum.2.040201}, + urldate = {2024-02-28}, + abstract = {Over the past few years, machine learning has emerged as a powerful computational tool to tackle complex problems in a broad range of scientific disciplines. In particular, artificial neural networks have been successfully used to mitigate the exponential complexity often encountered in quantum many-body physics, the study of properties of quantum systems built from a large number of interacting particles. In this article, we review some applications of neural networks in condensed matter physics and quantum information, with particular emphasis on hands-on tutorials serving as a quick start for a newcomer to the field. The prerequisites of this tutorial are basic probability theory and calculus, linear algebra, basic notions of neural networks, statistical physics, and quantum mechanics. The reader is introduced to supervised machine learning with convolutional neural networks to learn a phase transition, unsupervised learning with restricted Boltzmann machines to perform quantum tomography, and the variational Monte Carlo method with recurrent neural networks for approximating the ground state of a many-body Hamiltonian. For each algorithm, we briefly review the key ingredients and their corresponding neural-network implementation, and show numerical experiments for a system of interacting Rydberg atoms in two dimensions.}, + keywords = {ML,ML-QMBP,phase transition,RNN,TODO,todo-tagging,tutorial,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Carrasquilla_Torlai_2021_How To Use Neural Networks To Investigate Quantum Many-Body Physics.pdf;/Users/wasmer/Zotero/storage/TF25Y68Q/PRXQuantum.2.html} +} + +@article{carrasquillaMachineLearningPhases2017, + title = {Machine Learning Phases of Matter}, + author = {Carrasquilla, Juan and Melko, Roger G.}, + date = {2017-05}, + journaltitle = {Nature Physics}, + shortjournal = {Nature Phys}, + volume = {13}, + number = {5}, + pages = {431--434}, + publisher = {Nature Publishing Group}, + issn = {1745-2481}, + doi = {10.1038/nphys4035}, + url = {https://www.nature.com/articles/nphys4035}, + urldate = {2024-02-28}, + abstract = {The success of machine learning techniques in handling big data sets proves ideal for classifying condensed-matter phases and phase transitions. The technique is even amenable to detecting non-trivial states lacking in conventional order.}, + issue = {5}, + langid = {english}, + keywords = {Ising,ML,ML-QMBP,NQS,phase transition,symmetry breaking,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Carrasquilla_Melko_2017_Machine learning phases of matter.pdf} +} + +@article{carrasquillaMachineLearningQuantum2020, + title = {Machine Learning for Quantum Matter}, + author = {Carrasquilla, Juan}, + date = {2020-01-01}, + journaltitle = {Advances in Physics: X}, + volume = {5}, + number = {1}, + pages = {1797528}, + publisher = {Taylor \& Francis}, + issn = {null}, + doi = {10.1080/23746149.2020.1797528}, + url = {https://doi.org/10.1080/23746149.2020.1797528}, + urldate = {2024-02-28}, + abstract = {Quantum matter, the research field studying phases of matter whose properties are intrinsically quantum mechanical, draws from areas as diverse as hard condensed matter physics, materials science, statistical mechanics, quantum information, quantum gravity, and large-scale numerical simulations. 
Recently, researchers interested in quantum matter and strongly correlated quantum systems have turned their attention to the algorithms underlying modern machine learning with an eye on making progress in their fields. Here we provide a short review on the recent development and adaptation of machine learning ideas for the purpose of advancing research in quantum matter, including ideas ranging from algorithms that recognize conventional and topological states of matter in synthetic experimental data, to representations of quantum states in terms of neural networks and their applications to the simulation and control of quantum systems. We discuss the outlook for future developments in areas at the intersection between machine learning and quantum many-body physics.}, + keywords = {/unread,ML,review,strongly correlated,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Carrasquilla_2020_Machine learning for quantum matter.pdf} +} + @report{carterAdvancedResearchDirections2023, title = {Advanced {{Research Directions}} on {{AI}} for {{Science}}, {{Energy}}, and {{Security}}}, author = {Carter, Jonathan and Feddema, John and Kothe, Doug and Neely, Rob and Pruet, Jason and Stevens, Rick}, date = {2023-06-12}, number = {ANL-22/91}, - institution = {{DOE Office of Science}}, + institution = {DOE Office of Science}, url = {https://www.anl.gov/ai-for-science-report}, urldate = {2023-06-28}, abstract = {Over the past decade, fundamental changes in artificial intelligence (AI) have delivered dramatic insights across a wide breadth of U.S. Department of Energy (DOE) mission space. AI is helping to augment and improve scientific and engineering workflows in national security, the Office of Science, and DOE’s applied energy programs. The progress and potential for AI in DOE science was captured in the 2020 "AI for Science" report. In the short interim, the scale and scope of AI have accelerated, revealing new, emergent properties that yield insights that go beyond enabling opportunities to being potentially transformative in the way that scientific problems are posed and solved. These AI advances also highlight the crucial importance of responsible development of AI, focusing on challenges relating to AI technology (e.g., explainability, validation, security and privacy), implementation (e.g., transparency, safety engineering, ethics), and application (e.g., AI-Human interactions, education, and employment impacts). Under the guidance of both the Office of Science (SC) and the National Nuclear Security Administration (NNSA), the DOE national laboratories organized a series of workshops in 2022 to gather input on new and rapidly emerging opportunities and challenges of scientific AI. This 2023 report is a synthesis of those workshops. The report shows how unique DOE capabilities can enable the community to drive progress in scientific use of AI, building on DOE strengths and investments in computation, data, and communications infrastructure. This report lays out a vision for DOE to leverage and expand new capabilities in AI to accelerate the progress, and deepen the quality of mission areas spanning science, energy, and security. The vision and blueprint align precisely with the pressing need for scientific grounding in areas such as bias, transparency, explainability, security, validation, and the impact of AI on jobs. While dramatic progress is being made in AI by industry and defense in the U.S.
and other nations, the associated objectives and incentives only partially align with DOE’s mission. This progress also reflects the migration of AI and computer science talent to industry, creating a workforce disruption that DOE must address with urgency. Nevertheless, DOE’s investments in exascale systems, infrastructure, software, theory, and applications—combined with unique, multidisciplinary co-design approaches scaled to thousands of experts—uniquely position the DOE complex to address the challenges of responsible AI and to extend its global leadership in science, energy, and security.}, @@ -2255,7 +2646,7 @@ volume = {97}, number = {11}, pages = {115453}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.97.115453}, url = {https://link.aps.org/doi/10.1103/PhysRevB.97.115453}, urldate = {2021-05-21}, @@ -2277,7 +2668,7 @@ urldate = {2023-10-05}, abstract = {Density functional theory (DFT) stands as a cornerstone method in computational quantum chemistry and materials science due to its remarkable versatility and scalability. Yet, it suffers from limitations in accuracy, particularly when dealing with strongly correlated systems. To address these shortcomings, recent work has begun to explore how machine learning can expand the capabilities of DFT; an endeavor with many open questions and technical challenges. In this work, we present Grad DFT: a fully differentiable JAX-based DFT library, enabling quick prototyping and experimentation with machine learning-enhanced exchange-correlation energy functionals. Grad DFT employs a pioneering parametrization of exchange-correlation functionals constructed using a weighted sum of energy densities, where the weights are determined using neural networks. Moreover, Grad DFT encompasses a comprehensive suite of auxiliary functions, notably featuring a just-in-time compilable and fully differentiable self-consistent iterative procedure. To support training and benchmarking efforts, we additionally compile a curated dataset of experimental dissociation energies of dimers, half of which contain transition metal atoms characterized by strong electronic correlations. 
The software library is tested against experimental results to study the generalization capabilities of a neural functional across potential energy surfaces and atomic species, as well as the effect of training data noise on the resulting model accuracy.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,AML,autodiff,DM21,JAX,library,ML,ML-DFA,ML-DFT,ML-ESM,prediction of Exc,transition metals,with-code}, + keywords = {/unread,\_tablet,AML,autodiff,DM21,JAX,library,ML,ML-DFA,ML-DFT,ML-ESM,prediction of Exc,transition metals,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Casares et al_2023_Grad DFT.pdf;/Users/wasmer/Zotero/storage/EZ4L7B7D/2309.html} } @@ -2291,7 +2682,7 @@ volume = {121}, number = {5}, pages = {2777--2779}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c01322}, url = {https://doi.org/10.1021/acs.chemrev.0c01322}, @@ -2308,7 +2699,7 @@ volume = {154}, number = {16}, pages = {160401}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0051418}, url = {https://aip.scitation.org/doi/10.1063/5.0051418}, @@ -2346,7 +2737,7 @@ eprint = {21730167}, eprinttype = {pmid}, pages = {13023--13028}, - publisher = {{National Academy of Sciences}}, + publisher = {National Academy of Sciences}, issn = {0027-8424, 1091-6490}, doi = {10.1073/pnas.1108486108}, url = {https://www.pnas.org/content/108/32/13023}, @@ -2365,7 +2756,7 @@ volume = {150}, number = {15}, pages = {150901}, - publisher = {{AIP Publishing LLCAIP Publishing}}, + publisher = {AIP Publishing LLCAIP Publishing}, issn = {0021-9606}, doi = {10.1063/1.5091842}, url = {https://aip.scitation.org/doi/abs/10.1063/1.5091842}, @@ -2401,7 +2792,7 @@ volume = {2}, number = {3}, pages = {035038}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/abfe7c}, url = {https://doi.org/10.1088/2632-2153/abfe7c}, @@ -2418,7 +2809,7 @@ date = {2021-10-06}, url = {https://openreview.net/forum?id=WE4qe9xlnQw}, urldate = {2023-06-30}, - abstract = {Equivariance is becoming an increasingly popular design choice to build data efficient neural networks by exploiting prior knowledge about the symmetries of the problem at hand. Euclidean steerable CNNs are one of the most common classes of equivariant networks. While the constraints these architectures need to satisfy are understood, existing approaches are tailored to specific (classes of) groups. No generally applicable method that is practical for implementation has been described so far. In this work, we generalize the Wigner-Eckart theorem proposed in Lang \& Weiler (2020), which characterizes general \$G\$-steerable kernel spaces for compact groups \$G\$ over their homogeneous spaces, to arbitrary \$G\$-spaces. This enables us to directly parameterize filters in terms of a band-limited basis on the whole space rather than on \$G\$'s orbits, but also to easily implement steerable CNNs equivariant to a large number of groups. To demonstrate its generality, we instantiate our method on a variety of isometry groups acting on the Euclidean space \$\textbackslash mathbb\{R\}\^3\$. Our framework allows us to build \$E(3)\$ and \$SE(3)\$-steerable CNNs like previous works, but also CNNs with arbitrary \$G\textbackslash leq O(3)\$-steerable kernels. 
For example, we build 3D CNNs equivariant to the symmetries of platonic solids or choose \$G=SO(2)\$ when working with 3D data having only azimuthal symmetries. We compare these models on 3D shapes and molecular datasets, observing improved performance by matching the model's symmetries to the ones of the data.}, + abstract = {Equivariance is becoming an increasingly popular design choice to build data efficient neural networks by exploiting prior knowledge about the symmetries of the problem at hand. Euclidean steerable CNNs are one of the most common classes of equivariant networks. While the constraints these architectures need to satisfy are understood, existing approaches are tailored to specific (classes of) groups. No generally applicable method that is practical for implementation has been described so far. In this work, we generalize the Wigner-Eckart theorem proposed in Lang \& Weiler (2020), which characterizes general \$G\$-steerable kernel spaces for compact groups \$G\$ over their homogeneous spaces, to arbitrary \$G\$-spaces. This enables us to directly parameterize filters in terms of a band-limited basis on the whole space rather than on \$G\$'s orbits, but also to easily implement steerable CNNs equivariant to a large number of groups. To demonstrate its generality, we instantiate our method on a variety of isometry groups acting on the Euclidean space \$\textbackslash mathbb\{R\}\textasciicircum 3\$. Our framework allows us to build \$E(3)\$ and \$SE(3)\$-steerable CNNs like previous works, but also CNNs with arbitrary \$G\textbackslash leq O(3)\$-steerable kernels. For example, we build 3D CNNs equivariant to the symmetries of platonic solids or choose \$G=SO(2)\$ when working with 3D data having only azimuthal symmetries. We compare these models on 3D shapes and molecular datasets, observing improved performance by matching the model's symmetries to the ones of the data.}, eventtitle = {International {{Conference}} on {{Learning Representations}}}, langid = {english}, keywords = {3D,AML,CNN,computer vision,E(3),E(n),equivariant,General ML,library,ML,molecules,PyTorch,SO(3),steerable CNN,tensor field}, @@ -2434,7 +2825,7 @@ volume = {5}, number = {1}, pages = {1--7}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-019-0162-7}, url = {https://www.nature.com/articles/s41524-019-0162-7}, @@ -2443,7 +2834,12 @@ issue = {1}, langid = {english}, keywords = {\_tablet,custom structural descriptors,descriptors,DFT,FCNN,grid-based descriptors,LDOS,ML,ML-DFT,ML-ESM,models,NN,prediction from structure,prediction of electron density,prediction of LDOS,RNN}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational methods;Electronic structure;Theory and computation Subject\_term\_id: computational-methods;electronic-structure;theory-and-computation}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Computational methods;Electronic structure;Theory and computation\\ +Subject\_term\_id: computational-methods;electronic-structure;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Chandrasekaran et al_2019_Solving the electronic structure problem with machine learning.pdf;/Users/wasmer/Nextcloud/Zotero/Chandrasekaran et al_2019_Solving the electronic structure problem with machine 
learning2.pdf;/Users/wasmer/Zotero/storage/TL92B668/s41524-019-0162-7.html} } @@ -2455,7 +2851,7 @@ volume = {340}, number = {6129}, pages = {167--170}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.1234414}, url = {https://www.science.org/doi/10.1126/science.1234414}, urldate = {2022-05-13}, @@ -2472,7 +2868,7 @@ volume = {8}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00929-x}, url = {https://www.nature.com/articles/s41524-022-00929-x}, @@ -2484,6 +2880,20 @@ file = {/Users/wasmer/Nextcloud/Zotero/Chang et al_2022_Towards overcoming data scarcity in materials science.pdf} } +@book{chanIntroductionProbabilityData2023, + title = {Introduction to Probability for Data Science}, + author = {Chan, Stanley H.}, + date = {2023}, + publisher = {Michigan Publishing}, + location = {Ann Arbor}, + url = {https://probability4datascience.com}, + abstract = {"Probability is one of the most interesting subjects in electrical engineering and computer science. It bridges our favorite engineering principles to the practical reality, a world that is full of uncertainty. However, because probability is such a mature subject, the undergraduate textbooks alone might fill several rows of shelves in a library. When the literature is so rich, the challenge becomes how one can pierce through to the insight while diving into the details. For example, many of you have used a normal random variable before, but have you ever wondered where the 'bell shape' comes from? Every probability class will teach you about flipping a coin, but how can 'flipping a coin' ever be useful in machine learning today? Data scientists use the Poisson random variables to model the internet traffic, but where does the gorgeous Poisson equation come from? This book is designed to fill these gaps with knowledge that is essential to all data science students." -- Preface}, + isbn = {978-1-60785-747-1}, + langid = {english}, + keywords = {/unread,educational,learning material,mathematics,online book,probability theory,statistics,textbook,with-exercises}, + annotation = {OCLC: 1391367629} +} + @article{chanussotOpenCatalyst20202021, title = {The {{Open Catalyst}} 2020 ({{OC20}}) {{Dataset}} and {{Community Challenges}}}, author = {Chanussot, Lowik and Das, Abhishek and Goyal, Siddharth and Lavril, Thibaut and Shuaibi, Muhammed and Riviere, Morgane and Tran, Kevin and Heras-Domingo, Javier and Ho, Caleb and Hu, Weihua and Palizhati, Aini and Sriram, Anuroop and Wood, Brandon and Yoon, Junwoong and Parikh, Devi and Zitnick, C. 
Lawrence and Ulissi, Zachary}, @@ -2529,7 +2939,7 @@ volume = {53}, number = {9}, pages = {1981--1991}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0001-4842}, doi = {10.1021/acs.accounts.0c00403}, url = {https://doi.org/10.1021/acs.accounts.0c00403}, @@ -2548,7 +2958,7 @@ volume = {31}, number = {9}, pages = {3564--3572}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.9b01294}, url = {https://doi.org/10.1021/acs.chemmater.9b01294}, @@ -2587,6 +2997,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Chen_Ong_2022_A Universal Graph Deep Learning Interatomic Potential for the Periodic Table.pdf;/Users/wasmer/Zotero/storage/H4FKVKUF/2202.html} } +@article{chngMachineLearningPhases2017, + title = {Machine {{Learning Phases}} of {{Strongly Correlated Fermions}}}, + author = {Ch’ng, Kelvin and Carrasquilla, Juan and Melko, Roger G. and Khatami, Ehsan}, + date = {2017-08-30}, + journaltitle = {Physical Review X}, + shortjournal = {Phys. Rev. X}, + volume = {7}, + number = {3}, + pages = {031038}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevX.7.031038}, + url = {https://link.aps.org/doi/10.1103/PhysRevX.7.031038}, + urldate = {2024-02-28}, + abstract = {Machine learning offers an unprecedented perspective for the problem of classifying phases in condensed matter physics. We employ neural-network machine learning techniques to distinguish finite-temperature phases of the strongly correlated fermions on cubic lattices. We show that a three-dimensional convolutional network trained on auxiliary field configurations produced by quantum Monte Carlo simulations of the Hubbard model can correctly predict the magnetic phase diagram of the model at the average density of one (half filling). We then use the network, trained at half filling, to explore the trend in the transition temperature as the system is doped away from half filling. This transfer learning approach predicts that the instability to the magnetic phase extends to at least 5\% doping in this region. Our results pave the way for other machine learning applications in correlated quantum many-body systems.}, + keywords = {/unread,ML,prediction of phase,strongly correlated,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Ch’ng et al_2017_Machine Learning Phases of Strongly Correlated Fermions.pdf;/Users/wasmer/Zotero/storage/A5D9QCWJ/PhysRevX.7.html} +} + @online{chongRobustnessLocalPredictions2023, title = {Robustness of {{Local Predictions}} in {{Atomistic Machine Learning Models}}}, author = {Chong, Sanggyu and Grasselli, Federico and Mahmoud, Chiheb Ben and Morrow, Joe D. and Deringer, Volker L. and Ceriotti, Michele}, @@ -2612,7 +3040,7 @@ volume = {7}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-021-00650-1}, url = {https://www.nature.com/articles/s41524-021-00650-1}, @@ -2632,8 +3060,8 @@ urldate = {2023-07-01}, abstract = {Lack of rigorous reproducibility and validation are major hurdles for scientific development across many fields. Materials science in particular encompasses a variety of experimental and theoretical approaches that require careful benchmarking. Leaderboard efforts have been developed previously to mitigate these issues. 
However, a comprehensive comparison and benchmarking on an integrated platform with multiple data modalities with both perfect and defect materials data is still lacking. This work introduces JARVIS-Leaderboard, an open-source and community-driven platform that facilitates benchmarking and enhances reproducibility. The platform allows users to set up benchmarks with custom tasks and enables contributions in the form of dataset, code, and meta-data submissions. We cover the following materials design categories: Artificial Intelligence (AI), Electronic Structure (ES), Force-fields (FF), Quantum Computation (QC) and Experiments (EXP). For AI, we cover several types of input data, including atomic structures, atomistic images, spectra, and text. For ES, we consider multiple ES approaches, software packages, pseudopotentials, materials, and properties, comparing results to experiment. For FF, we compare multiple approaches for material property predictions. For QC, we benchmark Hamiltonian simulations using various quantum algorithms and circuits. Finally, for experiments, we use the inter-laboratory approach to establish benchmarks. There are 1281 contributions to 274 benchmarks using 152 methods with more than 8 million data-points, and the leaderboard is continuously expanding. The JARVIS-Leaderboard is available at the website: https://pages.nist.gov/jarvis\_leaderboard}, langid = {english}, - organization = {{arXiv.org}}, - keywords = {todo-tagging}, + pubstate = {preprint}, + keywords = {benchmarking,JARVIS,materials database,todo-tagging,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2023_Large Scale Benchmark of Materials Design Methods.pdf} } @@ -2647,7 +3075,7 @@ volume = {2}, number = {8}, pages = {083801}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.2.083801}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.2.083801}, urldate = {2021-06-26}, @@ -2679,7 +3107,7 @@ volume = {2}, number = {2}, pages = {022501}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2634-4386}, doi = {10.1088/2634-4386/ac4a83}, url = {https://dx.doi.org/10.1088/2634-4386/ac4a83}, @@ -2702,7 +3130,7 @@ urldate = {2023-11-05}, abstract = {Data-driven techniques are increasingly used to replace electronic-structure calculations of matter. In this context, a relevant question is whether machine learning (ML) should be applied directly to predict the desired properties or be combined explicitly with physically-grounded operations. We present an example of an integrated modeling approach, in which a symmetry-adapted ML model of an effective Hamiltonian is trained to reproduce electronic excitations from a quantum-mechanical calculation. The resulting model can make predictions for molecules that are much larger and more complex than those that it is trained on, and allows for dramatic computational savings by indirectly targeting the outputs of well-converged calculations while using a parameterization corresponding to a minimal atom-centered basis. 
These results emphasize the merits of intertwining data-driven techniques with physical approximations, improving the transferability and interpretability of ML models without affecting their accuracy and computational efficiency, and providing a blueprint for developing ML-augmented electronic-structure methods.}, pubstate = {preprint}, - keywords = {AML,B3LYP,basis set,emulator,excited states,hybrid AI/simulation,library,ML,ML-DFT,ML-ESM,ML-WFT,molecular orbitals,molecules,prediction of charge transfer,prediction of Hamiltonian matrix,prediction of orbital energies,STO-3G,transfer learning,with-code}, + keywords = {AML,B3LYP,basis set,emulator,excited states,hybrid AI/simulation,library,ML,ML-DFT,ML-ESM,ML-WFT,molecular orbitals,molecules,partial charges,prediction of charge transfer,prediction of Hamiltonian matrix,prediction of orbital energies,STO-3G,transfer learning,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Cignoni et al_2023_Electronic excited states from physically-constrained machine learning.pdf;/Users/wasmer/Zotero/storage/XWXG8UVG/2311.html} } @@ -2713,8 +3141,8 @@ date = {2023}, series = {Lecture {{Notes}} in {{Physics}}}, volume = {1000}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-031-32469-7}, url = {https://link.springer.com/10.1007/978-3-031-32469-7}, urldate = {2023-10-06}, @@ -2764,7 +3192,7 @@ volume = {321}, number = {5890}, pages = {792--794}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.1158722}, url = {https://www.science.org/doi/10.1126/science.1158722}, urldate = {2023-04-11}, @@ -2782,7 +3210,7 @@ volume = {300}, number = {5617}, pages = {286--290}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.1084564}, url = {https://www.science.org/doi/full/10.1126/science.1084564}, urldate = {2021-10-15}, @@ -2828,6 +3256,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Crisostomo et al_2023_Seven Useful Questions in Density Functional Theory.pdf;/Users/wasmer/Zotero/storage/45PQRMPB/2207.html} } +@article{cuevas-zuviriaAnalyticalModelElectron2020, + title = {Analytical {{Model}} of {{Electron Density}} and {{Its Machine Learning Inference}}}, + author = {Cuevas-Zuviría, Bruno and Pacios, Luis F.}, + date = {2020-08-24}, + journaltitle = {Journal of Chemical Information and Modeling}, + shortjournal = {J. Chem. Inf. Model.}, + volume = {60}, + number = {8}, + pages = {3831--3842}, + publisher = {American Chemical Society}, + issn = {1549-9596}, + doi = {10.1021/acs.jcim.0c00197}, + url = {https://doi.org/10.1021/acs.jcim.0c00197}, + urldate = {2024-04-05}, + abstract = {We present an analytical model representation of the electron density ρ(r) in molecules in the form of expansions of a few functions (exponentials and a Gaussian) per atom. Based on a former analytical model of ρ(r) in atoms, we devised its molecular implementation by introducing the anisotropy inherent in the electron distribution of atoms in molecules by means of proper anisotropic functions. The resulting model named A2MD (anisotropic analytical model of density) takes an analytical form highly suitable for obtaining the electron density in large biomolecules as its computational cost scales linearly with the number of atoms.
To obtain the parameters of the model, we first devised a fitting procedure to reference electron densities obtained in ab initio correlated quantum calculations. Second, in order to skip costly ab initio calculations, we also developed a machine learning (ML)-based predictor that used neural networks trained on broad molecular datasets to determine the parameters of the model. The resulting ML methodology that we named A2MDnet (A2MD network-trained) was able to provide reliable electron densities as a basis to predict molecular features without requiring quantum calculations. The results presented together with the low computational scaling associated to the A2MD representation of ρ(r) suggest potential applications to obtain reliable electron densities and ρ(r)-based molecular properties in biomacromolecules.}, + keywords = {all-electron,AML,analytical model,biomolecules,CCSD(T),DFT,library,linear regression,linear scaling,ML,ML-DFT,ML-ESM,molecules,NN,prediction of electron density,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Cuevas-Zuviría_Pacios_2020_Analytical Model of Electron Density and Its Machine Learning Inference.pdf} +} + @article{cuevas-zuviriaMachineLearningAnalytical2021, title = {Machine {{Learning}} of {{Analytical Electron Density}} in {{Large Molecules Through Message-Passing}}}, author = {Cuevas-Zuviría, Bruno and Pacios, Luis F.}, @@ -2837,7 +3284,7 @@ volume = {61}, number = {6}, pages = {2658--2666}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9596}, doi = {10.1021/acs.jcim.1c00227}, url = {https://doi.org/10.1021/acs.jcim.1c00227}, @@ -2856,7 +3303,7 @@ volume = {14}, number = {35}, pages = {7924--7930}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.3c02036}, url = {https://doi.org/10.1021/acs.jpclett.3c02036}, urldate = {2023-09-23}, @@ -2928,7 +3375,7 @@ volume = {12}, number = {3}, pages = {191--201}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/nmat3568}, url = {https://www.nature.com/articles/nmat3568}, @@ -3001,7 +3448,7 @@ eprintclass = {cond-mat}, url = {http://arxiv.org/abs/2112.13055}, urldate = {2022-01-03}, - abstract = {Many atomic descriptors are currently limited by their unfavourable scaling with the number of chemical elements \$S\$ e.g. the length of body-ordered descriptors, such as the Smooth Overlap of Atomic Positions (SOAP) power spectrum (3-body) and the Atomic Cluster Expansion (ACE) (multiple body-orders), scales as \$(NS)\^\textbackslash nu\$ where \$\textbackslash nu+1\$ is the body-order and \$N\$ is the number of radial basis functions used in the density expansion. We introduce two distinct approaches which can be used to overcome this scaling for the SOAP power spectrum. Firstly, we show that the power spectrum is amenable to lossless compression with respect to both \$S\$ and \$N\$, so that the descriptor length can be reduced from \$\textbackslash mathcal\{O\}(N\^2S\^2)\$ to \$\textbackslash mathcal\{O\}\textbackslash left(NS\textbackslash right)\$. Secondly, we introduce a generalized SOAP kernel, where compression is achieved through the use of the total, element agnostic density, in combination with radial projection. The ideas used in the generalized kernel are equally applicably to any other body-ordered descriptors and we demonstrate this for the Atom Centered Symmetry Functions (ACSF).
Finally, both compression approaches are shown to offer comparable performance to the original descriptor across a variety of numerical tests.}, + abstract = {Many atomic descriptors are currently limited by their unfavourable scaling with the number of chemical elements \$S\$ e.g. the length of body-ordered descriptors, such as the Smooth Overlap of Atomic Positions (SOAP) power spectrum (3-body) and the Atomic Cluster Expansion (ACE) (multiple body-orders), scales as \$(NS)\textasciicircum\textbackslash nu\$ where \$\textbackslash nu+1\$ is the body-order and \$N\$ is the number of radial basis functions used in the density expansion. We introduce two distinct approaches which can be used to overcome this scaling for the SOAP power spectrum. Firstly, we show that the power spectrum is amenable to lossless compression with respect to both \$S\$ and \$N\$, so that the descriptor length can be reduced from \$\textbackslash mathcal\{O\}(N\textasciicircum 2S\textasciicircum 2)\$ to \$\textbackslash mathcal\{O\}\textbackslash left(NS\textbackslash right)\$. Secondly, we introduce a generalized SOAP kernel, where compression is achieved through the use of the total, element agnostic density, in combination with radial projection. The ideas used in the generalized kernel are equally applicably to any other body-ordered descriptors and we demonstrate this for the Atom Centered Symmetry Functions (ACSF). Finally, both compression approaches are shown to offer comparable performance to the original descriptor across a variety of numerical tests.}, keywords = {\_tablet,ACE,ACSF,chemical species scaling problem,descriptor dimred,descriptors,descriptors analysis,dimensionality reduction,library,ML,SOAP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Darby et al_2021_Compressing local atomic neighbourhood descriptors.pdf;/Users/wasmer/Zotero/storage/GXXQQPAA/2112.html} } @@ -3015,12 +3462,12 @@ volume = {8}, number = {1}, pages = {1--13}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00847-y}, url = {https://www.nature.com/articles/s41524-022-00847-y}, urldate = {2022-09-27}, - abstract = {Many atomic descriptors are currently limited by their unfavourable scaling with the number of chemical elements S e.g. the length of body-ordered descriptors, such as the SOAP power spectrum (3-body) and the (ACE) (multiple body-orders), scales as (NS)ν where ν\,+\,1 is the body-order and N is the number of radial basis functions used in the density expansion. We introduce two distinct approaches which can be used to overcome this scaling for the SOAP power spectrum. Firstly, we show that the power spectrum is amenable to lossless compression with respect to both S and N, so that the descriptor length can be reduced from \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}(\{N\}\^\{2\}\{S\}\^\{2\})\$\$to \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}\textbackslash left(NS\textbackslash right)\$\$. Secondly, we introduce a generalised SOAP kernel, where compression is achieved through the use of the total, element agnostic density, in combination with radial projection. The ideas used in the generalised kernel are equally applicably to any other body-ordered descriptors and we demonstrate this for the (ACSF).}, + abstract = {Many atomic descriptors are currently limited by their unfavourable scaling with the number of chemical elements S e.g. 
the length of body-ordered descriptors, such as the SOAP power spectrum (3-body) and the (ACE) (multiple body-orders), scales as (NS)ν where ν\,+\,1 is the body-order and N is the number of radial basis functions used in the density expansion. We introduce two distinct approaches which can be used to overcome this scaling for the SOAP power spectrum. Firstly, we show that the power spectrum is amenable to lossless compression with respect to both S and N, so that the descriptor length can be reduced from \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}(\{N\}\textasciicircum\{2\}\{S\}\textasciicircum\{2\})\$\$to \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}\textbackslash left(NS\textbackslash right)\$\$. Secondly, we introduce a generalised SOAP kernel, where compression is achieved through the use of the total, element agnostic density, in combination with radial projection. The ideas used in the generalised kernel are equally applicably to any other body-ordered descriptors and we demonstrate this for the (ACSF).}, issue = {1}, langid = {english}, keywords = {\_tablet,ACE,ACSF,chemical species scaling problem,descriptor dimred,descriptors,descriptors analysis,dimensionality reduction,HEA,library,ML,SOAP,with-code}, @@ -3030,17 +3477,15 @@ @online{darbyTensorreducedAtomicDensity2022, title = {Tensor-Reduced Atomic Density Representations}, author = {Darby, James P. and Kovács, Dávid P. and Batatia, Ilyes and Caro, Miguel A. and Hart, Gus L. W. and Ortner, Christoph and Csányi, Gábor}, - date = {2022-10-01}, - eprint = {2210.01705}, - eprinttype = {arxiv}, - eprintclass = {cond-mat, physics:physics}, - doi = {10.48550/arXiv.2210.01705}, - url = {http://arxiv.org/abs/2210.01705}, - urldate = {2022-10-05}, + date = {2022-12-06}, + doi = {10.48550/ARXIV.2210.01705}, + url = {https://arxiv.org/abs/2210.01705}, + urldate = {2023-12-18}, abstract = {Density based representations of atomic environments that are invariant under Euclidean symmetries have become a widely used tool in the machine learning of interatomic potentials, broader data-driven atomistic modelling and the visualisation and analysis of materials datasets.The standard mechanism used to incorporate chemical element information is to create separate densities for each element and form tensor products between them. This leads to a steep scaling in the size of the representation as the number of elements increases. Graph neural networks, which do not explicitly use density representations, escape this scaling by mapping the chemical element information into a fixed dimensional space in a learnable way. We recast this approach as tensor factorisation by exploiting the tensor structure of standard neighbour density based descriptors. 
In doing so, we form compact tensor-reduced representations whose size does not depend on the number of chemical elements, but remain systematically convergeable and are therefore applicable to a wide range of data analysis and regression tasks.}, pubstate = {preprint}, - keywords = {\_tablet,ACE,chemical species scaling problem,descriptor dimred,descriptors,dimensionality reduction,MACE,ML,Multi-ACE}, - file = {/Users/wasmer/Nextcloud/Zotero/Darby et al_2022_Tensor-reduced atomic density representations.pdf;/Users/wasmer/Zotero/storage/6XMXCLL4/2210.html} + version = {2}, + keywords = {ACE,AML,chemical species scaling problem,descriptor comparison,descriptor dimred,descriptors,descriptors analysis,dimensionality reduction,HEA,invariance,MACE,ML,MPNN,Multi-ACE,organic chemistry,SOAP,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Darby et al_2022_Tensor-reduced atomic density representations2.pdf} } @article{darbyTensorReducedAtomicDensity2023, @@ -3052,13 +3497,13 @@ volume = {131}, number = {2}, pages = {028001}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.131.028001}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.131.028001}, urldate = {2023-09-22}, abstract = {Density-based representations of atomic environments that are invariant under Euclidean symmetries have become a widely used tool in the machine learning of interatomic potentials, broader data-driven atomistic modeling, and the visualization and analysis of material datasets. The standard mechanism used to incorporate chemical element information is to create separate densities for each element and form tensor products between them. This leads to a steep scaling in the size of the representation as the number of elements increases. Graph neural networks, which do not explicitly use density representations, escape this scaling by mapping the chemical element information into a fixed dimensional space in a learnable way. By exploiting symmetry, we recast this approach as tensor factorization of the standard neighbour-density-based descriptors and, using a new notation, identify connections to existing compression algorithms. In doing so, we form compact tensor-reduced representation of the local atomic environment whose size does not depend on the number of chemical elements, is systematically convergable, and therefore remains applicable to a wide range of data analysis and regression tasks.}, - keywords = {ACE,AML,chemical species scaling problem,descriptor comparison,descriptors,descriptors analysis,dimensionality reduction,HEA,invariance,ML,MPNN,organic chemistry,SOAP,todo-tagging}, - file = {/Users/wasmer/Zotero/storage/BW53SBPW/Darby et al. - 2023 - Tensor-Reduced Atomic Density Representations.pdf;/Users/wasmer/Zotero/storage/PF3QMDXQ/PhysRevLett.131.html} + keywords = {ACE,AML,chemical species scaling problem,descriptor comparison,descriptor dimred,descriptors,descriptors analysis,dimensionality reduction,HEA,invariance,MACE,ML,MPNN,Multi-ACE,organic chemistry,SOAP,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Darby et al_2023_Tensor-Reduced Atomic Density Representations3.pdf;/Users/wasmer/Zotero/storage/BW53SBPW/Darby et al. 
- 2023 - Tensor-Reduced Atomic Density Representations.pdf;/Users/wasmer/Zotero/storage/PF3QMDXQ/PhysRevLett.131.html} } @article{dasCrysXPPExplainableProperty2022, @@ -3071,7 +3516,7 @@ volume = {8}, number = {1}, pages = {1--11}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00716-8}, url = {https://www.nature.com/articles/s41524-022-00716-8}, @@ -3091,7 +3536,7 @@ urldate = {2023-10-08}, abstract = {Payel Das is a AI Science Department, IBM The Applied Machine Learning Days channel features talks and performances from the Applied Machine Learning Days held at the EPFL. AMLD is one of the largest machine learning \& AI events in Europe, focused specifically on the applications of machine learning and AI, making it particularly interesting to industry and academia. Follow AMLD: on Twitter: https://www.twitter.com/appliedmldays on LinkedIn: https://www.linkedin.com/company/appl... on Mastodon : https://mastodon.social/@amld AMLD Website: https://www.appliedmldays.org}, eventtitle = {Applied {{Machine Learning Days}}}, - venue = {{EPFL, Lausanne, Switzerland}}, + venue = {EPFL, Lausanne, Switzerland}, keywords = {/unread,AML,chemistry,IBM,large dataset,LLM,ML,MoLFormer,multi-target prediction,pretrained models,transformer} } @@ -3103,8 +3548,8 @@ date = {2008-06-09}, series = {{{SIGMOD}} '08}, pages = {1345--1350}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/1376616.1376772}, url = {https://doi.org/10.1145/1376616.1376772}, urldate = {2021-10-17}, @@ -3122,10 +3567,26 @@ urldate = {2023-08-25}, abstract = {In these Lecture Notes, we provide a comprehensive introduction to the most recent advances in the application of machine learning methods in quantum sciences. We cover the use of deep learning and kernel methods in supervised, unsupervised, and reinforcement learning algorithms for phase classification, representation of many-body quantum states, quantum feedback control, and quantum circuits optimization. Moreover, we introduce and discuss more specialized topics such as differentiable programming, generative models, statistical approach to machine learning, and quantum machine learning.}, langid = {english}, - keywords = {condensed matter,educational,equivariant,general ML,Ising,learning material,lecture notes,magnetic structure,ML,ML-WFT,NQS,quantum science,spin,spin symmetry,spin texture,symmetrization,symmetry,todo-tagging}, + keywords = {condensed matter,educational,equivariant,general ML,Ising,learning material,lecture notes,magnetic structure,ML,ML-QMBP,ML-WFT,NQS,prediction of wavefunction,quantum science,quantum state tomography,spin,spin symmetry,spin texture,symmetrization,symmetry,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Dawid et al_2022_Modern applications of machine learning in quantum sciences3.pdf} } +@online{dawidModernApplicationsMachine2023, + title = {Modern applications of machine learning in quantum sciences}, + author = {Dawid, Anna and Arnold, Julian and Requena, Borja and Gresch, Alexander and Płodzień, Marcin and Donatella, Kaelan and Nicoli, Kim A. and Stornati, Paolo and Koch, Rouven and Büttner, Miriam and Okuła, Robert and Muñoz-Gil, Gorka and Vargas-Hernández, Rodrigo A.
and Cervera-Lierta, Alba and Carrasquilla, Juan and Dunjko, Vedran and Gabrié, Marylou and Huembeli, Patrick and family=Nieuwenburg, given=Evert, prefix=van, useprefix=true and Vicentini, Filippo and Wang, Lei and Wetzel, Sebastian J. and Carleo, Giuseppe and Greplová, Eliška and Krems, Roman and Marquardt, Florian and Tomza, Michał and Lewenstein, Maciej and Dauphin, Alexandre}, + date = {2023-11-15}, + eprint = {2204.04198}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:quant-ph}, + doi = {10.48550/arXiv.2204.04198}, + url = {http://arxiv.org/abs/2204.04198}, + urldate = {2024-02-28}, + abstract = {In this book, we provide a comprehensive introduction to the most recent advances in the application of machine learning methods in quantum sciences. We cover the use of deep learning and kernel methods in supervised, unsupervised, and reinforcement learning algorithms for phase classification, representation of many-body quantum states, quantum feedback control, and quantum circuits optimization. Moreover, we introduce and discuss more specialized topics such as differentiable programming, generative models, statistical approach to machine learning, and quantum machine learning.}, + pubstate = {preprint}, + keywords = {/unread,condensed matter,educational,equivariant,general ML,Ising,learning material,lecture notes,magnetic structure,ML,ML-QMBP,ML-WFT,NQS,prediction of wavefunction,quantum science,quantum state tomography,spin,spin symmetry,spin texture,symmetrization,symmetry,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Dawid et al_2023_Modern applications of machine learning in quantum sciences.pdf;/Users/wasmer/Zotero/storage/RCZ83DUM/2204.html} +} + @software{dawidTutorialsPreparedSummer2022, title = {Tutorials Prepared for the "{{Summer School}}: {{Machine Learning}} in {{Quantum Physics}} and {{Chemistry}}"}, shorttitle = {Tutorials Prepared for the "{{Summer School}}}, @@ -3147,7 +3608,7 @@ volume = {7}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-021-00552-2}, url = {https://www.nature.com/articles/s41524-021-00552-2}, @@ -3168,7 +3629,7 @@ volume = {18}, number = {20}, pages = {13754--13769}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {1463-9084}, doi = {10.1039/C6CP00415F}, url = {https://pubs.rsc.org/en/content/articlelanding/2016/cp/c6cp00415f}, @@ -3187,7 +3648,7 @@ volume = {602}, number = {7897}, pages = {414--419}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-021-04301-9}, url = {https://www.nature.com/articles/s41586-021-04301-9}, @@ -3203,8 +3664,8 @@ title = {Mathematics for machine learning}, author = {Deisenroth, Marc Peter and Faisal, A. Aldo and Ong, Cheng Soon}, date = {2020}, - publisher = {{Cambridge University Press}}, - location = {{Cambridge ; New York, NY}}, + publisher = {Cambridge University Press}, + location = {Cambridge ; New York, NY}, abstract = {"The fundamental mathematical tools needed to understand machine learning include linear algebra, analytic geometry, matrix decompositions, vector calculus, optimization, probability, and statistics. These topics are traditionally taught in disparate courses, making it hard for data science or computer science students, or professionals, to efficiently learn the mathematics.
This self-contained textbook bridges the gap between mathematical and machine learning texts, introducing the mathematical concepts with a minimum of prerequisites. It uses these concepts to derive four central machine learning methods: linear regression, principal component analysis, Gaussian mixture models, and support vector machines. For students and others with a mathematical background, these derivations provide a starting point to machine learning texts. For those learning the mathematics for the first time, the methods help build intuition and practical experience with applying mathematical concepts"--}, isbn = {978-1-108-47004-9 978-1-108-45514-5}, keywords = {/unread,educational,General ML,learning material,linear algebra,mathematics,ML,ML theory,online book,probability theory,statistics,textbook} @@ -3219,7 +3680,7 @@ volume = {9}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01115-3}, url = {https://www.nature.com/articles/s41524-023-01115-3}, @@ -3227,7 +3688,7 @@ abstract = {Density functional theory (DFT) has been a critical component of computational materials research and discovery for decades. However, the computational cost of solving the central Kohn–Sham equation remains a major obstacle for dynamical studies of complex phenomena at-scale. Here, we propose an end-to-end machine learning (ML) model that emulates the essence of DFT by mapping the atomic structure of the system to its electronic charge density, followed by the prediction of other properties such as density of states, potential energy, atomic forces, and stress tensor, by using the atomic structure and charge density as input. Our deep learning model successfully bypasses the explicit solution of the Kohn-Sham equation with orders of magnitude speedup (linear scaling with system size with a small prefactor), while maintaining chemical accuracy. 
We demonstrate the capability of this ML-DFT concept for an extensive database of organic molecules, polymer chains, and polymer crystals.}, issue = {1}, langid = {english}, - keywords = {\_tablet,ACDC,AGNI desriptor,AML,chemistry,descriptors,emulator,grid-based descriptors,invariance,library,linear scaling,materials,ML,ML-DFT,ML-ESM,molecules,multi-step model,NN,organic chemistry,PBE,prediction of DOS,prediction of electron density,TensorFlow,tensorial target,VASP,with-code,with-data}, + keywords = {\_tablet,ACDC,AGNI desriptor,all-electron,AML,chemistry,Database,descriptors,emulator,grid-based descriptors,invariance,library,linear scaling,materials,ML,ML-DFT,ML-ESM,molecules,multi-output learning,multi-step model,NN,organic chemistry,PBE,polymers,prediction of bandgap,prediction of DOS,prediction of electron density,prediction of energy,prediction of forces,TensorFlow,tensorial target,VASP,with-code,with-data}, file = {/Users/wasmer/Zotero/storage/EHN4XYXG/del Rio et al_2023_A deep learning framework to emulate density functional theory.pdf} } @@ -3241,7 +3702,7 @@ volume = {124}, number = {45}, pages = {9496--9502}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1089-5639}, doi = {10.1021/acs.jpca.0c07458}, url = {https://doi.org/10.1021/acs.jpca.0c07458}, @@ -3277,7 +3738,7 @@ volume = {5}, number = {9}, pages = {1031--1041}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5839}, doi = {10.1038/s42256-023-00716-3}, url = {https://www.nature.com/articles/s42256-023-00716-3}, @@ -3308,7 +3769,7 @@ volume = {96}, number = {19}, pages = {195145}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.96.195145}, url = {https://link.aps.org/doi/10.1103/PhysRevB.96.195145}, urldate = {2021-05-21}, @@ -3326,7 +3787,7 @@ volume = {107}, number = {6}, pages = {064103}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.064103}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.064103}, urldate = {2023-03-01}, @@ -3357,7 +3818,7 @@ volume = {5}, number = {1}, pages = {1--23}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-019-0173-4}, url = {https://www.nature.com/articles/s41524-019-0173-4}, @@ -3365,7 +3826,12 @@ abstract = {The Materials Genome Initiative (MGI) advanced a new paradigm for materials discovery and design, namely that the pace of new materials deployment could be accelerated through complementary efforts in theory, computation, and experiment. Along with numerous successes, new challenges are inviting researchers to refocus the efforts and approaches that were originally inspired by the MGI. In May 2017, the National Science Foundation sponsored the workshop “Advancing and Accelerating Materials Innovation Through the Synergistic Interaction among Computation, Experiment, and Theory: Opening New Frontiers” to review accomplishments that emerged from investments in science and infrastructure under the MGI, identify scientific opportunities in this new environment, examine how to effectively utilize new materials innovation infrastructure, and discuss challenges in achieving accelerated materials research through the seamless integration of experiment, computation, and theory.
This article summarizes key findings from the workshop and provides perspectives that aim to guide the direction of future materials research and its translation into societal impacts.}, issue = {1}, langid = {english}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Materials science;Nanoscience and technology Subject\_term\_id: materials-science;nanoscience-and-technology}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Materials science;Nanoscience and technology\\ +Subject\_term\_id: materials-science;nanoscience-and-technology}, file = {/Users/wasmer/Nextcloud/Zotero/de Pablo et al_2019_New frontiers for the materials genome initiative.pdf;/Users/wasmer/Zotero/storage/PY8DXX7D/s41524-019-0173-4.html} } @@ -3378,7 +3844,7 @@ volume = {121}, number = {16}, pages = {10073--10141}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.1c00022}, url = {https://doi.org/10.1021/acs.chemrev.1c00022}, @@ -3396,7 +3862,7 @@ volume = {589}, number = {7840}, pages = {59--64}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-020-03072-z}, url = {https://www.nature.com/articles/s41586-020-03072-z}, @@ -3432,7 +3898,7 @@ volume = {104}, number = {16}, pages = {L161109}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.104.L161109}, url = {https://link.aps.org/doi/10.1103/PhysRevB.104.L161109}, urldate = {2021-12-14}, @@ -3450,7 +3916,7 @@ volume = {151}, number = {14}, pages = {144102}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5114618}, url = {https://aip.scitation.org/doi/full/10.1063/1.5114618}, @@ -3469,7 +3935,7 @@ volume = {11}, number = {1}, pages = {3509}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-17265-7}, url = {https://www.nature.com/articles/s41467-020-17265-7}, @@ -3478,7 +3944,12 @@ issue = {1}, langid = {english}, keywords = {BPNN,DFT,library,ML,ML-DFA,MLCF,molecules,NeuralXC,prediction from density,prediction of Exc,prediction of vxc,with-code}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Density functional theory;Electronic properties and materials;Molecular dynamics Subject\_term\_id: density-functional-theory;electronic-properties-and-materials;molecular-dynamics}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Density functional theory;Electronic properties and materials;Molecular dynamics\\ +Subject\_term\_id: density-functional-theory;electronic-properties-and-materials;molecular-dynamics}, file = {/Users/wasmer/Nextcloud/Zotero/Dick_Fernandez-Serra_2020_Machine learning accurate exchange and correlation functionals of the.pdf;/Users/wasmer/Zotero/storage/95GAG2CF/s41467-020-17265-7.html} } @@ -3491,7 +3962,7 @@ volume = {129}, number = {13}, pages = {136402}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.129.136402}, url = 
{https://link.aps.org/doi/10.1103/PhysRevLett.129.136402}, urldate = {2022-10-19}, @@ -3500,6 +3971,19 @@ file = {/Users/wasmer/Nextcloud/Zotero/Di Sante et al_2022_Deep Learning the Functional Renormalization Group.pdf;/Users/wasmer/Zotero/storage/LKT2Z79L/Di Sante et al_2022_Deep Learning the Functional Renormalization Group-supp.pdf;/Users/wasmer/Zotero/storage/PGSNSHSM/PhysRevLett.129.html} } +@thesis{dolgAnalyzingImprovingInitial2024, + type = {mathesis}, + title = {Analyzing and {{Improving}} the {{Initial Charge Density}} in {{FLEUR DFT Calculations}}}, + author = {Dolg, Lefan}, + date = {2024-03-05}, + institution = {RWTH Aachen University}, + location = {Jülich}, + abstract = {Density functional theory is a fundamental tool of materials science. Due to the increasing computational demands, it is in the interest of researchers to optimize the computational process. In my talk I will present the results of my master thesis which focused on improving the initial charge densities in FLEUR calculations. This is done with a high-throughput approach utilizing three different data sets. The optimized densities are analyzed for correlations and the created database is then used to train machine learning models. The talk includes a discussion on HTC through the AiiDA framework and the implementation of ML methods using the sklearn Python package.}, + pagetotal = {68}, + keywords = {AML,Bayesian optimization,charge density,DFT,FLEUR,FZJ,initial guess,ML,ML-DFT,PGI,PGI-1/IAS-1,scikit-learn,thesis}, + file = {/Users/wasmer/Nextcloud/Zotero/Dolg_2024_Analyzing and Improving the Initial Charge Density in FLEUR DFT Calculations.pdf} +} + @online{dominaClusterExpansionConstructed2023, title = {Cluster Expansion Constructed over {{Jacobi-Legendre}} Polynomials for Accurate Force Fields}, author = {Domina, Michelangelo and Patil, Urvesh and Cobelli, Matteo and Sanvito, Stefano}, @@ -3512,10 +3996,28 @@ urldate = {2023-06-27}, abstract = {We introduce a compact cluster expansion method, constructed over Jacobi and Legendre polynomials, to generate highly accurate and flexible machine-learning force fields. The constituent many-body contributions are separated, interpretable and adaptable to replicate the physical knowledge of the system. In fact, the flexibility introduced by the use of the Jacobi polynomials allows us to impose, in a natural way, constrains and symmetries to the cluster expansion. This has the effect of reducing the number of parameters needed for the fit and of enforcing desired behaviours of the potential. For instance, we show that our Jacobi-Legendre cluster expansion can be designed to generate potentials with a repulsive tail at short inter-atomic distances, without the need of imposing any external function. Our method is here continuously compared with available machine-learning potential schemes, such as the atomic cluster expansion and potentials built over the bispectrum. 
As an example we construct a Jacobi-Legendre potential for carbon, by training a slim and accurate model capable of describing crystalline graphite and diamond, as well as liquid and amorphous elemental carbon.}, pubstate = {preprint}, - keywords = {ABINIT,ACE,ACE-related,AML,bispectrum,carbon,cluster expansion,DFT,forces,JLP,MD,ML,ML-DFT,ML-FF,MLP,phonon,prediction of electron density}, + keywords = {\_tablet,ABINIT,ACE,ACE-related,AML,bispectrum,carbon,cluster expansion,DFT,forces,Jacobi-Legendre,JLP,MD,ML,ML-DFT,ML-FF,MLP,phonon,prediction of electron density}, file = {/Users/wasmer/Nextcloud/Zotero/Domina et al_2023_Cluster expansion constructed over Jacobi-Legendre polynomials for accurate.pdf;/Users/wasmer/Zotero/storage/AYU8VHC7/2208.html} } +@article{dominaClusterExpansionConstructed2023a, + title = {Cluster Expansion Constructed over {{Jacobi-Legendre}} Polynomials for Accurate Force Fields}, + author = {Domina, M. and Patil, U. and Cobelli, M. and Sanvito, S.}, + date = {2023-09-11}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {108}, + number = {9}, + pages = {094102}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.108.094102}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.108.094102}, + urldate = {2024-03-11}, + abstract = {We introduce a compact cluster expansion method constructed over Jacobi and Legendre polynomials to generate highly accurate and flexible machine-learning force fields. The constituent many-body contributions are separated, interpretable, and adaptable to replicate the physical knowledge of the system. In fact, the flexibility introduced by the use of the Jacobi polynomials allows us to impose, in a natural way, constraints and symmetries to the cluster expansion. This has the effect of reducing the number of parameters needed for the fit and of enforcing desired behaviors of the potential. For instance, we show that our Jacobi-Legendre cluster expansion can be designed to generate potentials with a repulsive tail at short interatomic distances, without the need of imposing any external function. Our method is here continuously compared with available machine-learning potential schemes, such as the atomic cluster expansion and potentials built over the bispectrum. As an example, we construct a Jacobi-Legendre potential for carbon by training a slim and accurate model capable of describing crystalline graphite and diamond, as well as liquid and amorphous elemental carbon.}, + keywords = {ABINIT,ACE,ACE-related,AML,bispectrum,carbon,cluster expansion,DFT,forces,Jacobi-Legendre,JLP,MD,ML,ML-DFT,ML-FF,MLP,phonon,prediction of electron density}, + file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Zotero/storage/2KLIJCKI/PhysRevB.108.html} +} + @online{dominaJacobiLegendrePotential2022, title = {The {{Jacobi-Legendre}} Potential}, author = {Domina, Michelangelo and Patil, Urvesh and Cobelli, Matteo and Sanvito, Stefano}, @@ -3528,7 +4030,7 @@ urldate = {2022-09-05}, abstract = {Inspired by the cluster expansion method, we introduce a compact machine-learning potential constructed over Jacobi and Legendre polynomials. The constituent many-body contributions are separated, fully interpretable and adaptable to replicate the physical knowledge of the system, such as a repulsive behaviour at a small inter-atomic distance. Most importantly the potential requires a small number of features to achieve accuracy comparable to that of more numerically heavy and descriptor-rich alternatives. 
This is here tested for an organic molecule, a crystalline solid and an amorphous compound. Furthermore, we argue that the physical interpretability of the various terms is key to the selection and training of stable potentials.}, pubstate = {preprint}, - keywords = {\_tablet,ACE,descriptors,DFT,invariance,Jacobi-Legendre potential,JLP,linear regression,ML,ML-ESM,MLP,prediction of total energy,SNAP}, + keywords = {\_tablet,ACE,descriptors,DFT,invariance,Jacobi-Legendre,JLP,linear regression,ML,ML-ESM,MLP,prediction of total energy,SNAP}, file = {/Users/wasmer/Nextcloud/Zotero/Domina et al_2022_The Jacobi-Legendre potential.pdf;/Users/wasmer/Zotero/storage/DUUKR6TZ/2208.html} } @@ -3542,12 +4044,12 @@ volume = {105}, number = {21}, pages = {214439}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.105.214439}, url = {https://link.aps.org/doi/10.1103/PhysRevB.105.214439}, urldate = {2022-09-05}, abstract = {We introduce a translational and rotational invariant local representation for vector fields, which can be employed in the construction of machine learning energy models of solids and molecules. This allows us to describe, on the same footing, the energy fluctuations due to the atomic motion, the longitudinal and transverse excitations of the vector field, and their mutual interplay. The formalism can then be applied to physical systems where the total energy is determined by a vector density, as in the case of magnetism. Our representation is constructed over the power spectrum of the combined angular momentum describing the local atomic positions and the vector field, and it can be used in conjunction with different machine learning schemes and data taken from accurate ab initio electronic structure theories. We demonstrate the descriptive power of our representation for a range of classical spin Hamiltonian and machine learning algorithms. In particular, we construct energy models based on both linear Ridge regression, as in conventional spectral neighbor analysis potentials, and the Gaussian approximation. 
These are both built to represent a Heisenberg-type Hamiltonian including a longitudinal energy term and spin-lattice coupling.}, - keywords = {\_tablet,descriptors,DFT,GPR,Heisenberg model,Jij,LRR,magnetism,ML,ML-DFT,ML-ESM,spin-dependent}, + keywords = {\_tablet,AML,descriptors,DFT,equivariant,GPR,Heisenberg model,invariance,Jij,linear regression,LRR,magnetism,ML,ML-DFT,ML-ESM,MLP,prediction of energy,spin-dependent,tensorial target,vector field}, file = {/Users/wasmer/Nextcloud/Zotero/Domina et al_2022_Spectral neighbor representation for vector fields.pdf;/Users/wasmer/Zotero/storage/F4KNYWPX/Domina et al_2022_Spectral neighbor representation for vector fields.pdf;/Users/wasmer/Zotero/storage/QX9ZENU5/PhysRevB.105.html} } @@ -3591,7 +4093,7 @@ volume = {2}, number = {1}, pages = {013808}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.2.013808}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.2.013808}, urldate = {2023-03-12}, @@ -3648,7 +4150,7 @@ volume = {11}, number = {6}, pages = {2336--2347}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.9b03664}, url = {https://doi.org/10.1021/acs.jpclett.9b03664}, urldate = {2023-03-02}, @@ -3662,7 +4164,7 @@ editor = {Dral, Pavlo O.}, date = {2022-09-15}, edition = {1}, - publisher = {{Elsevier}}, + publisher = {Elsevier}, doi = {10.1016/B978-0-323-90049-2.09989-3}, url = {https://www.sciencedirect.com/science/article/pii/B9780323900492099893}, urldate = {2023-09-30}, @@ -3683,7 +4185,7 @@ volume = {99}, number = {1}, pages = {014104}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.99.014104}, url = {https://link.aps.org/doi/10.1103/PhysRevB.99.014104}, urldate = {2022-05-11}, @@ -3721,6 +4223,18 @@ file = {/Users/wasmer/Nextcloud/Zotero/Drautz_Ortner_2022_Atomic cluster expansion and wave function representations.pdf;/Users/wasmer/Zotero/storage/6PTQT7NH/2206.html} } +@unpublished{drautzElectronsSimulationMaterials2023, + title = {From Electrons to the Simulation of Materials}, + author = {Drautz, Ralf}, + date = {2023-05-03}, + url = {https://www.youtube.com/watch?v=qv54YtaLWak}, + urldate = {2023-12-11}, + abstract = {Recorded 03 May 2023. Ralf Drautz of Ruhr-Universität Bochum presents "From electrons to the simulation of materials" at IPAM's workshop for Complex Scientific Workflows at Extreme Computational Scales. Abstract: The prediction of complex materials properties became possible recently by workflows that integrate high-throughput density functional theory calculations, training of machine learning potentials and subsequent atomistic simulations. In my talk I will give examples of our work from the computation of phase diagrams to the prediction of the structure of nano clusters. I will then discuss our workflows step by step and explain decisions and/or approximations that need to be made at each step. This will allow me to highlight remaining challenges and to give estimates of required computational resources. 
Learn more online at: hhttp://www.ipam.ucla.edu/programs/wor...}, + eventtitle = {{{IPAM Workshop Complex Scientific Workflows}} at {{Extreme Computational Scales}}}, + venue = {Institute for Pure and Applied Mathematics, Los Angeles}, + keywords = {/unread} +} + @article{drautzSpinclusterExpansionParametrization2004, title = {Spin-Cluster Expansion: {{Parametrization}} of the General Adiabatic Magnetic Energy Surface with {\emph{Ab Initio}} Accuracy}, shorttitle = {Spin-Cluster Expansion}, @@ -3744,7 +4258,7 @@ volume = {43}, number = {9}, pages = {676--682}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, issn = {0883-7694, 1938-1425}, doi = {10.1557/mrs.2018.208}, url = {https://www.cambridge.org/core/journals/mrs-bulletin/article/nomad-the-fair-concept-for-big-datadriven-materials-science/1EEF321F62D41997CA16AD367B74C4B0}, @@ -3764,7 +4278,7 @@ volume = {2}, number = {3}, pages = {036001}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2515-7639}, doi = {10.1088/2515-7639/ab13bb}, url = {https://doi.org/10.1088/2515-7639/ab13bb}, @@ -3778,8 +4292,8 @@ title = {Density {{Functional Theory}}}, author = {Dreizler, Reiner M. and Gross, Eberhard K. U.}, date = {1990}, - publisher = {{Springer}}, - location = {{Berlin, Heidelberg}}, + publisher = {Springer}, + location = {Berlin, Heidelberg}, doi = {10.1007/978-3-642-86105-5}, url = {http://link.springer.com/10.1007/978-3-642-86105-5}, urldate = {2023-09-21}, @@ -3792,7 +4306,7 @@ title = {Group {{Theory}}}, author = {Dresselhaus, Mildred S. and Dresselhaus, Gene and Jorio, Ado}, date = {2007}, - publisher = {{Springer Berlin Heidelberg}}, + publisher = {Springer Berlin Heidelberg}, url = {https://doi.org/10.1007/978-3-540-32899-5}, urldate = {2022-12-23}, abstract = {Application to the Physics of Condensed Matter}, @@ -3810,7 +4324,7 @@ volume = {525}, number = {7567}, pages = {73--76}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/nature14964}, url = {https://www.nature.com/articles/nature14964}, @@ -3819,7 +4333,11 @@ issue = {7567}, langid = {english}, keywords = {applications of DFT,DFT,master-thesis,superconductor}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Superconducting properties and materials Subject\_term\_id: superconducting-properties-and-materials}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Superconducting properties and materials\\ +Subject\_term\_id: superconducting-properties-and-materials}, file = {/Users/wasmer/Nextcloud/Zotero/Drozdov et al_2015_Conventional superconductivity at 203 kelvin at high pressures in the sulfur.pdf;/Users/wasmer/Zotero/storage/CJIZLLVA/nature14964.html} } @@ -3832,7 +4350,7 @@ volume = {13}, number = {1}, pages = {11881}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2045-2322}, doi = {10.1038/s41598-023-38551-6}, url = {https://www.nature.com/articles/s41598-023-38551-6}, @@ -3854,7 +4372,7 @@ volume = {6}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-020-00406-3}, url = {https://www.nature.com/articles/s41524-020-00406-3}, @@ -3862,7 +4380,12 @@ abstract = {We present a benchmark test suite and an automated machine learning procedure for evaluating 
supervised machine learning (ML) models for predicting properties of inorganic bulk materials. The test suite, Matbench, is a set of 13\,ML tasks that range in size from 312 to 132k samples and contain data from 10 density functional theory-derived and experimental sources. Tasks include predicting optical, thermal, electronic, thermodynamic, tensile, and elastic properties given a material’s composition and/or crystal structure. The reference algorithm, Automatminer, is a highly-extensible, fully automated ML pipeline for predicting materials properties from materials primitives (such as composition and crystal structure) without user intervention or hyperparameter tuning. We test Automatminer on the Matbench test suite and compare its predictive power with state-of-the-art crystal graph neural networks and a traditional descriptor-based Random Forest model. We find Automatminer achieves the best performance on 8 of 13 tasks in the benchmark. We also show our test suite is capable of exposing predictive advantages of each algorithm—namely, that crystal graph methods appear to outperform traditional machine learning methods given \textasciitilde{}$10^4$ or greater data points. We encourage evaluating materials ML algorithms on the Matbench benchmark and comparing them against the latest version of Automatminer.}, issue = {1}, langid = {english}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Atomistic models;Computational methods Subject\_term\_id: atomistic-models;computational-methods}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Atomistic models;Computational methods\\ +Subject\_term\_id: atomistic-models;computational-methods}, file = {/Users/wasmer/Nextcloud/Zotero/Dunn et al_2020_Benchmarking materials property prediction methods.pdf;/Users/wasmer/Zotero/storage/N76WQWKL/s41524-020-00406-3.html} } @@ -3877,6 +4400,7 @@ url = {http://arxiv.org/abs/1911.03550}, urldate = {2022-05-11}, abstract = {The Atomic Cluster Expansion (Drautz, Phys. Rev. B 99, 2019) provides a framework to systematically derive polynomial basis functions for approximating isometry and permutation invariant functions, particularly with an eye to modelling properties of atomistic systems. Our presentation extends the derivation by proposing a precomputation algorithm that yields immediate guarantees that a complete basis is obtained. We provide a fast recursive algorithm for efficient evaluation and illustrate its performance in numerical tests. Finally, we discuss generalisations and open challenges, particularly from a numerical stability perspective, around basis optimisation and parameter estimation, paving the way towards a comprehensive analysis of the convergence to a high-fidelity reference model.}, + keywords = {ACE,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Dusson et al_2021_Atomic Cluster Expansion.pdf;/Users/wasmer/Zotero/storage/7WDUQE6K/1911.html} } @@ -3898,6 +4422,39 @@ file = {/Users/wasmer/Nextcloud/Zotero/Du et al_2021_Tuning Fermi Levels in Intrinsic Antiferromagnetic Topological Insulators.pdf} } +@online{duvalFAENetFrameAveraging2023, + title = {{{FAENet}}: {{Frame Averaging Equivariant GNN}} for {{Materials Modeling}}}, + shorttitle = {{{FAENet}}}, + author = {Duval, Alexandre and Schmidt, Victor and Garcia, Alex Hernandez and Miret, Santiago and Malliaros, Fragkiskos D.
and Bengio, Yoshua and Rolnick, David}, + date = {2023-04-28}, + eprint = {2305.05577}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2305.05577}, + url = {http://arxiv.org/abs/2305.05577}, + urldate = {2023-12-04}, + abstract = {Applications of machine learning techniques for materials modeling typically involve functions known to be equivariant or invariant to specific symmetries. While graph neural networks (GNNs) have proven successful in such tasks, they enforce symmetries via the model architecture, which often reduces their expressivity, scalability and comprehensibility. In this paper, we introduce (1) a flexible framework relying on stochastic frame-averaging (SFA) to make any model E(3)-equivariant or invariant through data transformations. (2) FAENet: a simple, fast and expressive GNN, optimized for SFA, that processes geometric information without any symmetrypreserving design constraints. We prove the validity of our method theoretically and empirically demonstrate its superior accuracy and computational scalability in materials modeling on the OC20 dataset (S2EF, IS2RE) as well as common molecular modeling tasks (QM9, QM7-X). A package implementation is available at https://faenet.readthedocs.io.}, + pubstate = {preprint}, + keywords = {/unread,alternative approaches,alternative for equivariance,AML,approximative equivariance,equivariant,frame averaging,GNN,library,materials,ML,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Duval et al_2023_FAENet.pdf;/Users/wasmer/Zotero/storage/I9RAQSSH/2305.html} +} + +@online{duvalHitchhikerGuideGeometric2023, + title = {A {{Hitchhiker}}'s {{Guide}} to {{Geometric GNNs}} for {{3D Atomic Systems}}}, + author = {Duval, Alexandre and Mathis, Simon V. and Joshi, Chaitanya K. and Schmidt, Victor and Miret, Santiago and Malliaros, Fragkiskos D. and Cohen, Taco and Lio, Pietro and Bengio, Yoshua and Bronstein, Michael}, + date = {2023-12-12}, + eprint = {2312.07511}, + eprinttype = {arxiv}, + eprintclass = {cs, q-bio, stat}, + doi = {10.48550/arXiv.2312.07511}, + url = {http://arxiv.org/abs/2312.07511}, + urldate = {2024-01-13}, + abstract = {Recent advances in computational modelling of atomic systems, spanning molecules, proteins, and materials, represent them as geometric graphs with atoms embedded as nodes in 3D Euclidean space. In these graphs, the geometric attributes transform according to the inherent physical symmetries of 3D atomic systems, including rotations and translations in Euclidean space, as well as node permutations. In recent years, Geometric Graph Neural Networks have emerged as the preferred machine learning architecture powering applications ranging from protein structure prediction to molecular simulations and material generation. Their specificity lies in the inductive biases they leverage -- such as physical symmetries and chemical properties -- to learn informative representations of these geometric graphs. In this opinionated paper, we provide a comprehensive and self-contained overview of the field of Geometric GNNs for 3D atomic systems. We cover fundamental background material and introduce a pedagogical taxonomy of Geometric GNN architectures:(1) invariant networks, (2) equivariant networks in Cartesian basis, (3) equivariant networks in spherical basis, and (4) unconstrained networks. Additionally, we outline key datasets and application areas and suggest future research directions. 
The objective of this work is to present a structured perspective on the field, making it accessible to newcomers and aiding practitioners in gaining an intuition for its mathematical abstractions.}, + pubstate = {preprint}, + keywords = {\_tablet,Allegro,AML,benchmarking,best practices,biomolecules,CGCNN,DimeNet,E(n),e3nn,EGNN,equivariant,equivariant alternative,GemNet,geometric deep learning,geometric GNNs,GNN,graph ML,invariance,MACE,materials,MEGNet,ML,ML theory,model comparison,molecules,MPNN,NequIP,PAiNN,reference,review,SchNet,SE(3),SO(3),SphereNet,symmetry,tensor field,TensorNet,theory,transformer,tutorial,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Duval et al_2023_A Hitchhiker's Guide to Geometric GNNs for 3D Atomic Systems.pdf;/Users/wasmer/Zotero/storage/2CWI5WGP/2312.html} +} + @unpublished{dymLowDimensionalInvariant2022, title = {Low {{Dimensional Invariant Embeddings}} for {{Universal Geometric Learning}}}, author = {Dym, Nadav and Gortler, Steven J.}, @@ -3905,7 +4462,7 @@ eprint = {2205.02956}, eprinttype = {arxiv}, eprintclass = {cs, math}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2205.02956}, url = {http://arxiv.org/abs/2205.02956}, urldate = {2022-05-18}, @@ -3923,7 +4480,7 @@ volume = {74}, number = {9}, pages = {096501}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0034-4885}, doi = {10.1088/0034-4885/74/9/096501}, url = {https://doi.org/10.1088/0034-4885/74/9/096501}, @@ -3944,7 +4501,7 @@ url = {http://arxiv.org/abs/2104.14439}, urldate = {2021-05-18}, abstract = {Machine learning potentials have emerged as a powerful tool to extend the time and length scales of first principles-quality simulations. Still, most machine learning potentials cannot distinguish different electronic spin orientations and thus are not applicable to materials in different magnetic states. Here, we propose spin-dependent atom-centered symmetry functions as a new type of descriptor taking the atomic spin degrees of freedom into account. When used as input for a high-dimensional neural network potential (HDNNP), accurate potential energy surfaces of multicomponent systems describing multiple magnetic states can be constructed. We demonstrate the performance of these magnetic HDNNPs for the case of manganese oxide, MnO. We show that the method predicts the magnetically distorted rhombohedral structure in excellent agreement with density functional theory and experiment. Its efficiency allows to determine the N\textbackslash '\{e\}el temperature considering structural fluctuations, entropic effects, and defects. The method is general and is expected to be useful also for other types of systems like oligonuclear transition metal complexes.}, - keywords = {ACSF,ANN,descriptors,HDNNP,Heisenberg model,magnetism,ML,MLP,models,Physics - Computational Physics,spin-dependent,to read 2105}, + keywords = {ACSF,AML,ANN,collinear,descriptors,HDNNP,Heisenberg model,magnetism,ML,MLP,models,prediction of magnetic moment,RuNNer,spin-dependent,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Eckhoff_Behler_2021_High-Dimensional Neural Network Potentials for Magnetic Systems Using.pdf;/Users/wasmer/Zotero/storage/KW8NBSDW/2104.html} } @@ -3971,7 +4528,7 @@ author = {Eisenbach, Markus and Li, Ying Wai and Liu, Xianglin and Odbadrakh, Od K. and Pei, Zongrui and Stocks, George M. 
and Yin, Junqi}, date = {2017-12-01}, number = {LSMS; 005587WKSTN00}, - institution = {{Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}}, + institution = {Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}, url = {https://www.osti.gov/biblio/1420087}, urldate = {2023-09-19}, abstract = {LSMS is a first principles, Density Functional theory based, electronic structure code targeted mainly at materials applications. LSMS calculates the local spin density approximation to the diagonal part of the electron Green's function. The electron/spin density and energy are easily determined once the Green's function is known. Linear scaling with system size is achieved in the LSMS by using several unique properties of the real space multiple scattering approach to the Green's function.}, @@ -4004,8 +4561,8 @@ date = {2022}, series = {Communications in {{Computer}} and {{Information Science}}}, pages = {75--86}, - publisher = {{Springer Nature Switzerland}}, - location = {{Cham}}, + publisher = {Springer Nature Switzerland}, + location = {Cham}, doi = {10.1007/978-3-031-23606-8_5}, abstract = {The investigation of finite temperature properties using Monte-Carlo (MC) methods requires a large number of evaluations of the system’s Hamiltonian to sample the phase space needed to obtain physical observables as function of temperature. DFT calculations can provide accurate evaluations of the energies, but they are too computationally expensive for routine simulations. To circumvent this problem, machine-learning (ML) based surrogate models have been developed and implemented on high-performance computing (HPC) architectures. In this paper, we describe two ML methods (linear mixing model and HydraGNN) as surrogates for first principles density functional theory (DFT) calculations with classical MC simulations. These two surrogate models are used to learn the dependence of target physical properties from complex compositions and interactions of their constituents. We present the predictive performance of these two surrogate models with respect to their complexity while avoiding the danger of overfitting the model. An important aspect of our approach is the periodic retraining with newly generated first principles data based on the progressive exploration of the system’s phase space by the MC simulation. The numerical results show that HydraGNN model attains superior predictive performance compared to the linear mixing model for magnetic alloy materials.}, isbn = {978-3-031-23606-8}, @@ -4021,21 +4578,27 @@ urldate = {2023-09-19}, abstract = {While modern Monte-Carlo algorithms are highly efficient for computational statistical mechanics in many systems, it is desirable for many materials simulations to utilize energies that are evaluated using density functional theory to capture the complex interactions in multicomponent systems. In the past we have performed calculations by combining our LSMS first principles code with Wang-Landau Monte-Carlo calculations. The number of Monte-Carlo steps limits the applicability of this method even on high-performance computer systems. Thus, we are integrating a machine learning derived surrogate model with Monte-Carlo calculations. Here we present our results of deriving surrogate models from total energy calculations that replicate the behavior of first principles calculations of alloy ordering transitions. 
In addition to evaluating the attainable speedup, we explore strategies for reducing the dimensionality of the surrogate model as well as the impact of the model on the accuracy of the Monte-Carlo results. *This work is supported in part by the Office of Science of the Department of Energy and by the LDRD Program of Oak Ridge National Laboratory. It used resources of the Oak Ridge Leadership Computing Facility, supported by the Office of Science of the U.S. Department of Energy.}, eventtitle = {{{APS March Meeting}} 2019}, - venue = {{Boston}}, + venue = {Boston}, keywords = {/unread,todo-tagging}, - annotation = {Authors: - -Markus Eisenbach (Oak Ridge National Laboratory) - -Jiaxin Zhang (Oak Ridge National Laboratory) - -Zongrui Pei (Oak Ridge National Laboratory) - -Massimiliano Lupo Pasini (Oak Ridge National Laboratory) - -Ying Wai Li (Oak Ridge National Laboratory) - -Junqi Yin (Oak Ridge National Laboratory)}, + annotation = {Authors:\\ +\\ +Markus Eisenbach\\ +(Oak Ridge National Laboratory)\\ +\\ +Jiaxin Zhang\\ +(Oak Ridge National Laboratory)\\ +\\ +Zongrui Pei\\ +(Oak Ridge National Laboratory)\\ +\\ +Massimiliano Lupo Pasini\\ +(Oak Ridge National Laboratory)\\ +\\ +Ying Wai Li\\ +(Oak Ridge National Laboratory)\\ +\\ +Junqi Yin\\ +(Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/729Z2H6S/F21.html} } @@ -4046,8 +4609,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2009-11-14}, series = {{{SC}} '09}, pages = {1--8}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/1654059.1654125}, url = {https://doi.org/10.1145/1654059.1654125}, urldate = {2023-09-19}, @@ -4065,7 +4628,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {104}, number = {3}, pages = {035120}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.104.035120}, url = {https://link.aps.org/doi/10.1103/PhysRevB.104.035120}, urldate = {2021-12-05}, @@ -4079,7 +4642,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {White {{Paper}} - {{Leveraging Physics-Based Models}} and {{AI}} for New {{Material Development}}}, author = {Erwin, William and Edkins, Stephen}, date = {2023-06}, - institution = {{Citrine Informatics}}, + institution = {Citrine Informatics}, url = {https://citrine.io/success/white-papers/white-paper-leveraging-physics-based-models-and-ai-for-new-material-development/}, urldate = {2023-08-19}, abstract = {Physics-based models (PBMs) are used extensively in materials research, primarily as a proxy for more time and cost intensive experimental material production and characterization.}, @@ -4144,7 +4707,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {127}, number = {9}, pages = {097202}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.127.097202}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.127.097202}, urldate = {2022-03-29}, @@ -4161,12 +4724,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {10}, number = {41}, pages = {9424--9432}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/C9SC02696G}, url = {https://pubs.rsc.org/en/content/articlelanding/2019/sc/c9sc02696g}, urldate = {2021-10-16}, langid = {english}, - keywords = {ML,ML-DFT,ML-ESM,prediction of electron density}, + keywords = 
{library,ML,ML-DFT,ML-ESM,prediction of electron density,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fabrizio et al_2019_Electron density learning of non-covalent systems.pdf} } @@ -4179,7 +4742,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {95}, number = {10}, pages = {104105}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.95.104105}, url = {https://link.aps.org/doi/10.1103/PhysRevB.95.104105}, urldate = {2021-07-22}, @@ -4197,7 +4760,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {21}, number = {8}, pages = {3222--3244}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.21.3222}, url = {https://link.aps.org/doi/10.1103/PhysRevB.21.3222}, urldate = {2023-09-19}, @@ -4211,7 +4774,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, shorttitle = {Multiple {{Scattering Theory}}}, author = {Faulkner, J. S. and Stocks, G. Malcolm and Wang, Yang}, date = {2018-12-01}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, url = {https://iopscience.iop.org/book/mono/978-0-7503-1490-9}, urldate = {2023-09-19}, abstract = {{$<$}p{$>$}In 1947, it was discovered that multiple scattering theory can be used to solve the Schrödinger equation for the stationary states of electrons in a solid. Written by experts in the field, Dr J S Faulkner, G M Stocks, and Yang Wang, this book collates the results of numerous studies in the field of multiple scattering theory and provides a comprehensive, systematic approach to MSTs.{$<$}/p{$><$}p{$>$}For many scientists, students and engineers working with multiple scattering programmes, this will be a useful guide that help with the existing knowledge of MST as well as understanding its future implications. For those interested in learning about multiple scattering theory, this book will serve as an introduction for those wanting to use MST for their own calculations.{$<$}/p{$>$}}, @@ -4221,6 +4784,21 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Faulkner et al_2018_Multiple Scattering Theory.pdf} } +@book{faulknerMultipleScatteringTheory2018a, + title = {Multiple {{Scattering Theory}}}, + author = {Faulkner, J. S. and Stocks, G. M. 
and Wang, Y.}, + date = {2018}, + edition = {1}, + publisher = {IOP Publishing}, + doi = {10.1088/2053-2563/aae7d8}, + url = {https://iopscience.iop.org/book/978-0-7503-1490-9}, + urldate = {2021-12-02}, + isbn = {978-0-7503-1490-9}, + langid = {english}, + keywords = {\_tablet}, + file = {/Users/wasmer/Nextcloud/Zotero/Multiple Scattering Theory.pdf;/Users/wasmer/Zotero/storage/UYLUXULV/978-0-7503-1490-9.html} +} + @patent{feinbergSystemsMethodsSpatial2023, type = {patentus}, title = {Systems and {{Methods}} for {{Spatial Graph Convolutions}} with {{Applications}} to {{Drug Discovery}} and {{Molecular Simulation}}}, @@ -4366,7 +4944,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {4}, pages = {040301}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.6.040301}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.6.040301}, urldate = {2023-03-09}, @@ -4399,8 +4977,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023-10-02}, series = {Challenges and {{Advances}} in {{Computational Chemistry}} and {{Physics}}}, pages = {113--160}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-031-37196-7_5}, url = {https://doi.org/10.1007/978-3-031-37196-7_5}, urldate = {2023-10-06}, @@ -4436,7 +5014,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01070-z}, url = {https://www.nature.com/articles/s41524-023-01070-z}, @@ -4478,7 +5056,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {4}, pages = {045008}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac9956}, url = {https://dx.doi.org/10.1088/2632-2153/ac9956}, @@ -4505,6 +5083,21 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Finzi et al_2021_A Practical Method for Constructing Equivariant Multilayer Perceptrons for.pdf;/Users/wasmer/Zotero/storage/CFQ89M8L/2104.html} } +@book{fleuretLittleBookDeep2023, + title = {The {{Little Book}} of {{Deep Learning}}}, + author = {Fleuret, François}, + date = {2023-06}, + edition = {1}, + publisher = {self-published}, + url = {https://fleuret.org/francois/lbdl.html}, + urldate = {2023-12-05}, + abstract = {This book is a short introduction to deep learning for readers with a STEM background, originally designed to be read on a phone screen. It is distributed under a non-commercial Creative Commons license and was downloaded close to 250'000 times in the month following its public release.}, + langid = {english}, + pagetotal = {168}, + keywords = {\_tablet,Deep learning,educational,General ML,learning material,ML theory,online book,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Fleuret_2023_The Little Book of Deep Learning.pdf} +} + @article{flores-livasPredictionHotSuperconductivity2019, title = {A {{Prediction}} for “{{Hot}}†{{Superconductivity}}}, author = {Flores-Livas, José A. 
and Arita, Ryotaro}, @@ -4512,7 +5105,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Physics}, volume = {12}, pages = {96}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.123.097001}, url = {https://physics.aps.org/articles/v12/96}, urldate = {2021-10-21}, @@ -4534,7 +5127,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-02-23}, abstract = {As the go-to method to solve the electronic structure problem, Kohn-Sham density functional theory (KS-DFT) can be used to obtain the ground-state charge density, total energy, and several other key materials' properties. Unfortunately, the solution of the Kohn-Sham equations is found iteratively. This is a numerically intensive task, limiting the possible size and complexity of the systems to be treated. Machine-learning (ML) models for the charge density can then be used as surrogates to generate the converged charge density and reduce the computational cost of solving the electronic structure problem. We derive a powerful grid-centred structural representation based on the Jacobi and Legendre polynomials that, combined with a linear regression built on a data-efficient workflow, can accurately learn the charge density. Then, we design a machine-learning pipeline that can return energy and forces at the quality of a converged DFT calculation but at a fraction of the computational cost. This can be used as a tool for the fast scanning of the energy landscape and as a starting point to the DFT self-consistent cycle, in both cases maintaining a low computational cost.}, pubstate = {preprint}, - keywords = {\_tablet,AML,DFT,grid-based descriptors,Jacobi-Legendre potential,library,ML,ML-DFT,prediction of electron density,VASP,with-code}, + keywords = {\_tablet,AML,DFT,grid-based descriptors,Jacobi-Legendre,library,ML,ML-Density,ML-DFT,ML-ESM,prediction of electron density,VASP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Focassio et al_2023_Linear Jacobi-Legendre expansion of the charge density for machine.pdf;/Users/wasmer/Zotero/storage/HPSZ89R2/2301.html} } @@ -4547,7 +5140,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01053-0}, url = {https://www.nature.com/articles/s41524-023-01053-0}, @@ -4555,18 +5148,35 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {Kohn–Sham density functional theory (KS-DFT) is a powerful method to obtain key materials’ properties, but the iterative solution of the KS equations is a numerically intensive task, which limits its application to complex systems. To address this issue, machine learning (ML) models can be used as surrogates to find the ground-state charge density and reduce the computational overheads. We develop a grid-centred structural representation, based on Jacobi and Legendre polynomials combined with a linear regression, to accurately learn the converged DFT charge density. This integrates into a ML pipeline that can return any density-dependent observable, including energy and forces, at the quality of a converged DFT calculation, but at a fraction of the computational cost. 
Fast scanning of energy landscapes and producing starting densities for the DFT self-consistent cycle are among the applications of our scheme.}, issue = {1}, langid = {english}, - keywords = {\_tablet,AML,Computational methods,DFT,Electronic structure,grid-based descriptors,Jacobi-Legendre potential,library,ML,ML-DFT,prediction of electron density,VASP,with-code}, + keywords = {\_tablet,AML,DFT,grid-based descriptors,Jacobi-Legendre,library,ML,ML-Density,ML-DFT,ML-ESM,prediction of electron density,VASP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Focassio et al_2023_Linear Jacobi-Legendre expansion of the charge density for machine2.pdf} } +@online{focassioPerformanceAssessmentUniversal2024, + title = {Performance {{Assessment}} of {{Universal Machine Learning Interatomic Potentials}}: {{Challenges}} and {{Directions}} for {{Materials}}' {{Surfaces}}}, + shorttitle = {Performance {{Assessment}} of {{Universal Machine Learning Interatomic Potentials}}}, + author = {Focassio, Bruno and Freitas, Luis Paulo Mezzina and Schleder, Gabriel R.}, + date = {2024-03-06}, + eprint = {2403.04217}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2403.04217}, + url = {http://arxiv.org/abs/2403.04217}, + urldate = {2024-03-14}, + abstract = {Machine learning interatomic potentials (MLIPs) are one of the main techniques in the materials science toolbox, able to bridge ab initio accuracy with the computational efficiency of classical force fields. This allows simulations ranging from atoms, molecules, and biosystems, to solid and bulk materials, surfaces, nanomaterials, and their interfaces and complex interactions. A recent class of advanced MLIPs, which use equivariant representations and deep graph neural networks, is known as universal models. These models are proposed as foundational models suitable for any system, covering most elements from the periodic table. Current universal MLIPs (UIPs) have been trained with the largest consistent dataset available nowadays. However, these are composed mostly of bulk materials' DFT calculations. In this article, we assess the universality of all openly available UIPs, namely MACE, CHGNet, and M3GNet, in a representative task of generalization: calculation of surface energies. We find that the out-of-the-box foundational models have significant shortcomings in this task, with errors correlated to the total energy of surface simulations, having an out-of-domain distance from the training dataset. Our results show that while UIPs are an efficient starting point for fine-tuning specialized models, we envision the potential of increasing the coverage of the materials space towards universal training datasets for MLIPs.}, + pubstate = {preprint}, + keywords = {\_tablet,AML,benchmarking,CHGNet,disordered,foundation models,M3GNet,MACE,materials project,ML,MLP,MLP comparison,MTP,NequIP,surface physics,todo-tagging,universal potential}, + file = {/Users/wasmer/Nextcloud/Zotero/Focassio et al_2024_Performance Assessment of Universal Machine Learning Interatomic Potentials.pdf;/Users/wasmer/Zotero/storage/QLASD4BQ/2403.html} +} + @report{foulkesTopologyEntanglementStrong2020, title = {Topology, {{Entanglement}}, and {{Strong Correlations}}}, author = {Foulkes, W. M. C. 
and Drautz, Ralf}, - editorb = {Pavarini, Eva and Koch, Erik}, - editorbtype = {redactor}, + editor = {Pavarini, Eva and Koch, Erik}, + editortype = {redactor}, date = {2020}, number = {FZJ-2020-03083}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {https://juser.fz-juelich.de/record/884084/}, urldate = {2022-06-28}, abstract = {Topology and entanglement are key concepts in many-body physics. Understanding the associated emergent phenomena beyond toy models – in the world of real strongly-correlated materials – requires the mastery of a wealth of different methods. These encompass analytical tools such as group theory, first principles techniques based on density-functional theory, materials-specific model-building schemes, as well as advanced modern numerical approaches for solving realistic many-body models. This year’s school provides an overview of the state-of-the-art of these methods, their successes and their limitations. After introducing the basics, lectures will present the core concepts of topology and entanglement in many-body systems. To make contact to real materials, strategies for building materials specific models and techniques for their solution will be introduced. Among the latter, the school will cover quantum Monte Carlo methods, construction and optimization of correlated wave-functions, recursion and renormalization group techniques, as well as dynamical mean-field theory. More advanced lectures will give a pedagogical overview on topological materials and their physics: topological metals, semimetals, and superconductors. Towards the end of the school entanglement in quantum dynamics and perspectives in quantum computation will be discussed. The goal of the school is to introduce advanced graduate students and up to these modern approaches for the realistic modeling of strongly correlated materials. A school of this size and scope requires backing from many sources. This is even more true this year. As everywhere, the Corona pandemics provided scores of new challenges. Plans had to be changed and real facilities had to be replaced with virtual ones. We are very grateful for all the practical and financial support we have received. The Institute for Advanced Simulation at the Forschungszentrum Jülich and the Jülich Supercomputer Centre provided the major part of the funding and were vital for the organization and reorganization of the school as well as for the production of this book. The Institute for Complex Adaptive Matter (ICAM) supplied additional funds and ideas for successful online formats. The nature of a school makes it desirable to have the lecture notes available when the lectures are given. This way students get the chance to work through the lectures thoroughly while their memory is still fresh. We are therefore extremely grateful to the lecturers that, despite tight deadlines, provided their manuscripts in time for the production of this book. We are confident that the lecture notes collected here will not only serve the participants of the school but will also be useful for other students entering the exciting field of strongly correlated materials. We are grateful to Mrs. H. Lexis of the Verlag des Forschungszentrum Jülich and to Mrs. D. Mans of the Grafische Betriebe for providing their expert support in producing the present volume on a tight schedule.
We heartily thank our students and postdocs who helped with proofreading the manuscripts, often on quite short notice: Elaheh Adibi, Julian Mußhoff, Neda Samani, and Xue-Jing Zhang. Finally, our special thanks go to Dipl.-Ing. R. Hölzle for his invaluable advice on the innumerable questions concerning the organization of such an endeavor, and to Mrs. L. Snyders for expertly handling all practical issues. Pavarini, Eva; Koch, Erik}, @@ -4599,7 +5209,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, eprint = {2106.02549}, eprinttype = {arxiv}, eprintclass = {physics}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2106.02549}, url = {http://arxiv.org/abs/2106.02549}, urldate = {2022-05-18}, @@ -4672,7 +5282,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {14}, number = {10}, pages = {13406--13417}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1936-0851}, doi = {10.1021/acsnano.0c05267}, url = {https://doi.org/10.1021/acsnano.0c05267}, @@ -4718,12 +5328,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {105}, number = {1}, pages = {014103}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.105.014103}, url = {https://link.aps.org/doi/10.1103/PhysRevB.105.014103}, urldate = {2023-05-06}, abstract = {The commonly employed supercell approach for defects in crystalline materials may introduce spurious interactions between the defect and its periodic images. A rich literature is available on how the interaction energies can be estimated, reduced, or corrected. A simple and seemingly straightforward approach is to extrapolate from a series of finite supercell sizes to the infinite-size limit, assuming a smooth polynomial dependence of the energy on inverse supercell size. In this work, we demonstrate by means of explicit density-functional theory supercell calculations and simplified models that wave-function overlap and electrostatic interactions lead to more complex dependencies on supercell size than commonly assumed.
We show that this complexity cannot be captured by the simple extrapolation approaches and that suitable correction schemes should be employed.}, - keywords = {\_tablet,/unread,2D material,defects,DFT,impurity embedding,interfaces and thin films,KKR,point defects,supercell,surface physics}, + keywords = {/unread,\_tablet,2D material,defects,DFT,impurity embedding,interfaces and thin films,KKR,point defects,supercell,surface physics}, file = {/Users/wasmer/Nextcloud/Zotero/Freysoldt et al_2022_Limitations of empirical supercell extrapolation for calculations of point.pdf;/Users/wasmer/Zotero/storage/EAVNV5DB/PhysRevB.105.html} } @@ -4736,7 +5346,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {20}, number = {6}, pages = {750--761}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/s41563-020-0777-6}, url = {https://www.nature.com/articles/s41563-020-0777-6}, @@ -4757,7 +5367,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {592}, number = {7854}, pages = {350--352}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, doi = {10.1038/d41586-021-00954-8}, url = {https://www.nature.com/articles/d41586-021-00954-8}, urldate = {2022-10-21}, @@ -4765,7 +5375,9 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {7854}, langid = {english}, keywords = {Majorana,Peer review,quantum computing,rec-by-ghosh,reproducibility crisis,skeptics}, - annotation = {Bandiera\_abtest: a Cg\_type: Comment Subject\_term: Quantum physics, Publishing, Peer review}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Comment\\ +Subject\_term: Quantum physics, Publishing, Peer review}, file = {/Users/wasmer/Nextcloud/Zotero/Frolov_2021_Quantum computing’s reproducibility crisis.pdf;/Users/wasmer/Zotero/storage/CLEGVGB5/d41586-021-00954-8.html} } @@ -4828,12 +5440,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {2}, pages = {023802}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.6.023802}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.6.023802}, urldate = {2023-04-04}, abstract = {Automatic exhaustive exploration of a large material space by high-performance supercomputers is crucial for developing new functional materials. We demonstrated the efficiency of high-throughput calculations using the all-electron Korringa-Kohn-Rostoker coherent potential approximation method with the density functional theory for the large material space consisting of quaternary high entropy alloys, which are nonstoichiometric and substitutionally disordered materials. The exhaustive calculations were performed for 147 630 systems based on the AkaiKKR program package and supercomputer Fugaku, where the numerical parameters and self-consistent convergence are automatically controlled. The large material database including the total energies, magnetization, Curie temperature, and residual resistivity was constructed by our calculations. We used frequent itemset mining to identify the characteristics of parcels in magnetization and Curie temperature space. 
We also identified the elements that enhance the magnetization and Curie temperature and clarified the rough dependence of the elements through regression modeling of the residual resistivity.}, - keywords = {\_tablet,/unread,CPA,HTC,KKR}, + keywords = {/unread,\_tablet,CPA,HTC,KKR}, file = {/Users/wasmer/Nextcloud/Zotero/Fukushima et al_2022_Automatic exhaustive calculations of large material space by.pdf;/Users/wasmer/Zotero/storage/VNUQ6LGT/PhysRevMaterials.6.html} } @@ -4846,7 +5458,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {34}, number = {11}, pages = {4848--4855}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.1c04252}, url = {https://doi.org/10.1021/acs.chemmater.1c04252}, @@ -4864,7 +5476,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {34}, number = {11}, pages = {4848--4855}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.1c04252}, url = {https://doi.org/10.1021/acs.chemmater.1c04252}, @@ -4874,6 +5486,24 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/D55696WS/acs.chemmater.html} } +@article{furnessAccurateNumericallyEfficient2020, + title = {Accurate and {{Numerically Efficient r2SCAN Meta-Generalized Gradient Approximation}}}, + author = {Furness, James W. and Kaplan, Aaron D. and Ning, Jinliang and Perdew, John P. and Sun, Jianwei}, + date = {2020-10-01}, + journaltitle = {The Journal of Physical Chemistry Letters}, + shortjournal = {J. Phys. Chem. Lett.}, + volume = {11}, + number = {19}, + pages = {8208--8215}, + publisher = {American Chemical Society}, + doi = {10.1021/acs.jpclett.0c02405}, + url = {https://doi.org/10.1021/acs.jpclett.0c02405}, + urldate = {2023-12-05}, + abstract = {The recently proposed rSCAN functional [ J. Chem. Phys. 2019 150, 161101] is a regularized form of the SCAN functional [ Phys. Rev. Lett. 2015 115, 036402] that improves SCAN’s numerical performance at the expense of breaking constraints known from the exact exchange–correlation functional. We construct a new meta-generalized gradient approximation by restoring exact constraint adherence to rSCAN. The resulting functional maintains rSCAN’s numerical performance while restoring the transferable accuracy of SCAN.}, + keywords = {/unread,density functional,DFA,DFT,GGA,meta-GGA,original publication,physics,r2SCAN,rSCAN,SCAN}, + file = {/Users/wasmer/Zotero/storage/J5STXDFJ/Furness et al. 
- 2020 - Accurate and Numerically Efficient r2SCAN Meta-Gen.pdf} +} + @online{galkinGraphML20222021, title = {Graph {{ML}} in 2022: {{Where Are We Now}}?}, shorttitle = {Graph {{ML}} in 2022}, @@ -4883,7 +5513,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2022-01-02}, abstract = {Hot trends and major advancements}, langid = {english}, - organization = {{Medium}}, + organization = {Medium}, file = {/Users/wasmer/Zotero/storage/8ESSCXA2/graph-ml-in-2022-where-are-we-now-f7f8242599e0.html} } @@ -4893,8 +5523,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, editor = {Gamma, Erich}, date = {1995}, series = {Addison-{{Wesley}} Professional Computing Series}, - publisher = {{Addison-Wesley}}, - location = {{Reading, Mass}}, + publisher = {Addison-Wesley}, + location = {Reading, Mass}, isbn = {978-0-201-63361-0}, pagetotal = {395}, keywords = {OO,Reusability,software engineering,Software patterns} @@ -4924,7 +5554,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {1572}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-29243-2}, url = {https://www.nature.com/articles/s41467-022-29243-2}, @@ -4961,7 +5591,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {122}, number = {15}, pages = {156001}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.122.156001}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.122.156001}, urldate = {2021-08-21}, @@ -4970,6 +5600,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Garrido Torres et al_2019_Low-Scaling Algorithm for Nudged Elastic Band Calculations Using a Surrogate.pdf;/Users/wasmer/Zotero/storage/TFSWCWBH/Garrido Torres et al. - 2019 - Low-Scaling Algorithm for Nudged Elastic Band Calc.pdf;/Users/wasmer/Zotero/storage/DWT7X58R/PhysRevLett.122.html} } +@article{garrisonApplyingLargeGraph2023, + title = {Applying {{Large Graph Neural Networks}} to {{Predict Transition Metal Complex Energies Using}} the {{tmQM}}\_{{wB97MV Data Set}}}, + author = {Garrison, Aaron G. and Heras-Domingo, Javier and Kitchin, John R. and family=Passos Gomes, given=Gabriel, prefix=dos, useprefix=true and Ulissi, Zachary W. and Blau, Samuel M.}, + date = {2023-12-04}, + journaltitle = {Journal of Chemical Information and Modeling}, + shortjournal = {J. Chem. Inf. Model.}, + publisher = {American Chemical Society}, + issn = {1549-9596}, + doi = {10.1021/acs.jcim.3c01226}, + url = {https://doi.org/10.1021/acs.jcim.3c01226}, + urldate = {2023-12-07}, + abstract = {Machine learning (ML) methods have shown promise for discovering novel catalysts but are often restricted to specific chemical domains. Generalizable ML models require large and diverse training data sets, which exist for heterogeneous catalysis but not for homogeneous catalysis. The tmQM data set, which contains properties of 86,665 transition metal complexes calculated at the TPSSh/def2-SVP level of density functional theory (DFT), provided a promising training data set for homogeneous catalyst systems. However, we find that ML models trained on tmQM consistently underpredict the energies of a chemically distinct subset of the data. To address this, we present the tmQM\_wB97MV data set, which filters out several structures in tmQM found to be missing hydrogens and recomputes the energies of all other structures at the ωB97M-V/def2-SVPD level of DFT. 
ML models trained on tmQM\_wB97MV show no pattern of consistently incorrect predictions and much lower errors than those trained on tmQM. The ML models tested on tmQM\_wB97MV were, from best to worst, GemNet-T {$>$} PaiNN ≈ SpinConv {$>$} SchNet. Performance consistently improves when using only neutral structures instead of the entire data set. However, while models saturate with only neutral structures, more data continue to improve the models when including charged species, indicating the importance of accurately capturing a range of oxidation states in future data generation and model development. Furthermore, a fine-tuning approach in which weights were initialized from models trained on OC20 led to drastic improvements in model performance, indicating transferability between ML strategies of heterogeneous and homogeneous systems.}, + keywords = {/unread,AML,benchmarking,GemNet,GNN,materials database,ML,MLP,MLP comparison,model evaluation,PAiNN,SchNet,todo-tagging,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Garrison et al_2023_Applying Large Graph Neural Networks to Predict Transition Metal Complex.pdf} +} + @online{gasteigerDirectionalMessagePassing2022, title = {Directional {{Message Passing}} for {{Molecular Graphs}}}, author = {Gasteiger, Johannes and Groß, Janek and Günnemann, Stephan}, @@ -5060,7 +5706,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {973}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-28526-y}, url = {https://www.nature.com/articles/s41467-022-28526-y}, @@ -5081,7 +5727,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {1}, pages = {015011}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac3149}, url = {https://doi.org/10.1088/2632-2153/ac3149}, @@ -5116,7 +5762,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-07-01}, abstract = {Predictive atomistic simulations are increasingly employed for data intensive high throughput studies that take advantage of constantly growing computational resources. To handle the sheer number of individual calculations that are needed in such studies, workflow management packages for atomistic simulations have been developed for a rapidly growing user base. These packages are predominantly designed to handle computationally heavy ab initio calculations, usually with a focus on data provenance and reproducibility. However, in related simulation communities, e.g. the developers of machine learning interatomic potentials (MLIPs), the computational requirements are somewhat different: the types, sizes, and numbers of computational tasks are more diverse, and therefore require additional ways of parallelization and local or remote execution for optimal efficiency. In this work, we present the atomistic simulation and MLIP fitting workflow management package wfl and Python remote execution package ExPyRe to meet these requirements. With wfl and ExPyRe, versatile Atomic Simulation Environment based workflows that perform diverse procedures can be written. This capability is based on a low-level developer-oriented framework, which can be utilized to construct high level functionality for user-friendly programs. Such high level capabilities to automate machine learning interatomic potential fitting procedures are already incorporated in wfl, which we use to showcase its capabilities in this work. 
We believe that wfl fills an important niche in several growing simulation communities and will aid the development of efficient custom computational tasks.}, langid = {english}, - organization = {{arXiv.org}}, + organization = {arXiv.org}, keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/GelžinytÄ— et al_2023_wfl Python Toolkit for Creating Machine Learning Interatomic Potentials and.pdf} } @@ -5125,7 +5771,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {A {{Comprehensive Discovery Platform}} for {{Organophosphorus Ligands}} for {{Catalysis}}}, author = {Gensch, Tobias and family=Passos Gomes, given=Gabriel, prefix=dos, useprefix=true and Friederich, Pascal and Peters, Ellyn and Gaudin, Theophile and Pollice, Robert and Jorner, Kjell and Nigam, AkshatKumar and Lindner D'Addario, Michael and Sigman, Matthew S. and Aspuru-Guzik, Alan}, date = {2021-04-27}, - publisher = {{ChemRxiv}}, + publisher = {ChemRxiv}, doi = {10.26434/chemrxiv.12996665.v1}, url = {/articles/preprint/A_Comprehensive_Discovery_Platform_for_Organophosphorus_Ligands_for_Catalysis/12996665/1}, urldate = {2021-05-15}, @@ -5174,8 +5820,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Géron, Aurélien}, date = {2019}, edition = {Second edition}, - publisher = {{O'Reilly Media, Inc}}, - location = {{Sebastopol, CA}}, + publisher = {O'Reilly Media, Inc}, + location = {Sebastopol, CA}, abstract = {Through a series of recent breakthroughs, deep learning has boosted the entire field of machine learning. Now, even programmers who know close to nothing about this technology can use simple, efficient tools to implement programs capable of learning from data. The updated edition of this best-selling book uses concrete examples, minimal theory, and two production-ready Python frameworks-Scikit-Learn and TensorFlow 2-to help you gain an intuitive understanding of the concepts and tools for building intelligent systems. Practitioners will learn a range of techniques that they can quickly put to use on the job. Part 1 employs Scikit-Learn to introduce fundamental machine learning tasks, such as simple linear regression. Part 2, which has been significantly updated, employs Keras and TensorFlow 2 to guide the reader through more advanced machine learning methods using deep neural networks. With exercises in each chapter to help you apply what you've learned, all you need is programming experience to get started. NEW FOR THE SECOND EDITION:Updated all code to TensorFlow 2 ; Introduced the high-level Keras API ; New and expanded coverage including TensorFlow's Data API, Eager Execution, Estimators API, deploying on Google Cloud ML, handling time series, embeddings and more With Early Release ebooks, you get books in their earliest form-the author's raw and unedited content as he or she writes-so you can take advantage of these technologies long before the official release of these titles. 
You'll also receive updates when significant changes are made, new chapters are available, and the final ebook bundle is released}, isbn = {978-1-4920-3264-9}, pagetotal = {819}, @@ -5193,7 +5839,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {92}, number = {4}, pages = {045131}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.92.045131}, url = {https://link.aps.org/doi/10.1103/PhysRevB.92.045131}, urldate = {2023-04-04}, @@ -5209,14 +5855,18 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Nature Reviews Physics}, shortjournal = {Nat Rev Phys}, pages = {1--1}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5820}, doi = {10.1038/s42254-021-00373-8}, url = {https://www.nature.com/articles/s42254-021-00373-8}, urldate = {2021-09-11}, abstract = {Luca Ghiringhelli introduces an AI toolkit that can be used with materials databases to discover new materials, or new properties of known materials.}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Research Highlights Subject\_term: Computational methods;Scientific data Subject\_term\_id: computational-methods;scientific-data}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research Highlights\\ +Subject\_term: Computational methods;Scientific data\\ +Subject\_term\_id: computational-methods;scientific-data}, file = {/Users/wasmer/Nextcloud/Zotero/Ghiringhelli_2021_An AI-toolkit to develop and share research into new materials.pdf;/Users/wasmer/Zotero/storage/LTJNU3SG/s42254-021-00373-8.html} } @@ -5230,7 +5880,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {114}, number = {10}, pages = {105503}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.114.105503}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.114.105503}, urldate = {2021-05-15}, @@ -5249,7 +5899,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-017-0048-5}, url = {https://www.nature.com/articles/s41524-017-0048-5}, @@ -5258,7 +5908,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {1}, langid = {english}, keywords = {materials database,materials informatics,materials metadata,NOMAD}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Condensed-matter physics;Theory and computation Subject\_term\_id: condensed-matter-physics;theory-and-computation}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Condensed-matter physics;Theory and computation\\ +Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Ghiringhelli et al_2017_Towards efficient data exchange and sharing for big-data driven materials.pdf;/Users/wasmer/Zotero/storage/G3CTM9SN/s41524-017-0048-5.html} } @@ -5305,7 +5960,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {11}, pages = {113804}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.6.113804}, url = 
{https://link.aps.org/doi/10.1103/PhysRevMaterials.6.113804}, urldate = {2023-04-04}, @@ -5323,7 +5978,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--7}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00891-8}, url = {https://www.nature.com/articles/s41524-022-00891-8}, @@ -5371,7 +6026,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Modern {{Condensed Matter Physics}}}, author = {Girvin, Steven M. and Yang, Kun}, date = {2019-02-28}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, doi = {10.1017/9781316480649}, url = {https://www.cambridge.org/highereducation/books/modern-condensed-matter-physics/F0A27AC5DEA8A40EA6EA5D727ED8B14E}, urldate = {2022-06-18}, @@ -5391,7 +6046,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {95}, number = {21}, pages = {214302}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.95.214302}, url = {https://link.aps.org/doi/10.1103/PhysRevB.95.214302}, urldate = {2021-10-19}, @@ -5437,7 +6092,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {2}, pages = {pgac039}, - publisher = {{Proceedings of the National Academy of Sciences}}, + publisher = {Proceedings of the National Academy of Sciences}, doi = {10.1093/pnasnexus/pgac039}, url = {https://www.pnas.org/doi/full/10.1093/pnasnexus/pgac039}, urldate = {2022-07-02}, @@ -5478,7 +6133,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Frontiers in Chemistry}, shortjournal = {Front. Chem.}, volume = {7}, - publisher = {{Frontiers}}, + publisher = {Frontiers}, issn = {2296-2646}, doi = {10.3389/fchem.2019.00377}, url = {https://www.frontiersin.org/articles/10.3389/fchem.2019.00377/full?utm_source=ad&utm_medium=fb&utm_campaign=ba_sci_fchem}, @@ -5514,7 +6169,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {14}, number = {1}, pages = {2848}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-023-38468-8}, url = {https://www.nature.com/articles/s41467-023-38468-8}, @@ -5522,19 +6177,19 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {The combination of deep learning and ab initio calculation has shown great promise in revolutionizing future scientific research, but how to design neural network models incorporating a priori knowledge and symmetry requirements is a key challenging subject. Here we propose an E(3)-equivariant deep-learning framework to represent density functional theory (DFT) Hamiltonian as a function of material structure, which can naturally preserve the Euclidean symmetry even in the presence of spin–orbit coupling. Our DeepH-E3 method enables efficient electronic structure calculation at ab initio accuracy by learning from DFT data of small-sized structures, making the routine study of large-scale supercells ({$>$}104 atoms) feasible. The method can reach sub-meV prediction accuracy at high training efficiency, showing state-of-the-art performance in our experiments. 
The work is not only of general significance to deep-learning method development but also creates opportunities for materials research, such as building a Moiré-twisted material database.}, issue = {1}, langid = {english}, - keywords = {\_tablet,AML,bismuth selenide,bismuth telluride,DeepH,E(3),e3nn,ENN,equivariant,library,magnetism,materials,ML,ML-DFT,ML-ESM,non-collinear,PAW,PBE,prediction of Hamiltonian matrix,PyTorch,SOC,spin-dependent,topological insulator,twisted bilayer graphene,VASP,vdW materials,with-code,with-data}, + keywords = {\_tablet,AML,bismuth selenide,bismuth telluride,DeepH,E(3),e3nn,ENN,equivariant,library,materials,ML,ML-DFT,ML-ESM,PAW,PBE,prediction of Hamiltonian matrix,PyTorch,SOC,spin-dependent,topological insulator,twisted bilayer graphene,VASP,vdW materials,with-code,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density2.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density3.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density4.pdf} } @book{gonisMultipleScatteringSolids2000, title = {Multiple {{Scattering}} in {{Solids}}}, author = {Gonis, Antonios and Butler, William H.}, - editorb = {Berry, R. Stephen and Birman, Joseph L. and Lynn, Jeffrey W. and Silverman, Mark P. and Stanley, H. Eugene and Voloshin, Mikhail}, - editorbtype = {redactor}, + editor = {Berry, R. Stephen and Birman, Joseph L. and Lynn, Jeffrey W. and Silverman, Mark P. and Stanley, H. Eugene and Voloshin, Mikhail}, + editortype = {redactor}, date = {2000}, series = {Graduate {{Texts}} in {{Contemporary Physics}}}, - publisher = {{Springer}}, - location = {{New York, NY}}, + publisher = {Springer}, + location = {New York, NY}, doi = {10.1007/978-1-4612-1290-4}, url = {https://link.springer.com/10.1007/978-1-4612-1290-4}, urldate = {2023-09-19}, @@ -5593,7 +6248,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2021-09-23}, abstract = {Style guides for Google-originated open-source projects}, langid = {american}, - organization = {{styleguide}}, + organization = {styleguide}, keywords = {coding style guide,Python,software engineering}, file = {/Users/wasmer/Zotero/storage/HRL7NEIR/pyguide.html} } @@ -5620,8 +6275,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Gorelick, Micha and Ozsvald, Ian}, date = {2020}, edition = {Second edition}, - publisher = {{O'Reilly}}, - location = {{Beijing}}, + publisher = {O'Reilly}, + location = {Beijing}, isbn = {978-1-4920-5502-0}, langid = {english}, pagetotal = {444}, @@ -5651,7 +6306,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {2}, pages = {025028}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/abdaf7}, url = {https://doi.org/10.1088/2632-2153/abdaf7}, @@ -5714,7 +6369,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2022-12-01}, journaltitle = {Journal of Chemical Theory and Computation}, shortjournal = {J. Chem. 
Theory Comput.}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.2c00850}, url = {https://doi.org/10.1021/acs.jctc.2c00850}, @@ -5733,7 +6388,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {151}, number = {20}, pages = {204105}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5128375}, url = {https://aip.scitation.org/doi/full/10.1063/1.5128375}, @@ -5752,7 +6407,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {151}, number = {20}, pages = {204105}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5128375}, url = {https://aip.scitation.org/doi/full/10.1063/1.5128375}, @@ -5771,7 +6426,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {6}, pages = {2078--2090}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2041-6539}, doi = {10.1039/D0SC04934D}, url = {https://pubs.rsc.org/en/content/articlelanding/2021/sc/d0sc04934d}, @@ -5807,7 +6462,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {120}, number = {3}, pages = {036002}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.120.036002}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.120.036002}, urldate = {2021-10-19}, @@ -5825,7 +6480,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {1}, pages = {57--64}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {2374-7943}, doi = {10.1021/acscentsci.8b00551}, url = {https://doi.org/10.1021/acscentsci.8b00551}, @@ -5841,8 +6496,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Grus, Joel}, date = {2019}, edition = {Second edition}, - publisher = {{O'Reilly Media}}, - location = {{Sebastopol, CA}}, + publisher = {O'Reilly Media}, + location = {Sebastopol, CA}, isbn = {978-1-4920-4113-9}, pagetotal = {384}, keywords = {data science,general,practice,python}, @@ -5850,13 +6505,44 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/home/johannes/Books/data_science/general_practice/Grus_DataScienceFromScratchPython_2e-2019.epub} } +@online{gruverFineTunedLanguageModels2024, + title = {Fine-{{Tuned Language Models Generate Stable Inorganic Materials}} as {{Text}}}, + author = {Gruver, Nate and Sriram, Anuroop and Madotto, Andrea and Wilson, Andrew Gordon and Zitnick, C. Lawrence and Ulissi, Zachary}, + date = {2024-02-06}, + eprint = {2402.04379}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2402.04379}, + url = {http://arxiv.org/abs/2402.04379}, + urldate = {2024-05-07}, + abstract = {We propose fine-tuning large language models for generation of stable materials. While unorthodox, fine-tuning large language models on text-encoded atomistic data is simple to implement yet reliable, with around 90\% of sampled structures obeying physical constraints on atom positions and charges. Using energy above hull calculations from both learned ML potentials and gold-standard DFT calculations, we show that our strongest model (fine-tuned LLaMA-2 70B) can generate materials predicted to be metastable at about twice the rate (49\% vs 28\%) of CDVAE, a competing diffusion model. 
Because of text prompting's inherent flexibility, our models can simultaneously be used for unconditional generation of stable material, infilling of partial structures and text-conditional generation. Finally, we show that language models' ability to capture key symmetries of crystal structures improves with model scale, suggesting that the biases of pretrained LLMs are surprisingly well-suited for atomistic data.}, + pubstate = {preprint}, + keywords = {AML,crystal structure prediction,fine-tuning,language models,LLM,materials discovery,Meta Research,ML,pretrained models,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Gruver et al_2024_Fine-Tuned Language Models Generate Stable Inorganic Materials as Text.pdf;/Users/wasmer/Zotero/storage/J53W542H/2402.html} +} + +@book{gubanovMagnetismElectronicStructure1992, + title = {Magnetism and the {{Electronic Structure}} of {{Crystals}}}, + author = {Gubanov, V. A. and Liechtenstein, A. I. and Postnikov, A. V.}, + date = {1992}, + series = {Springer {{Series}} in {{Solid-State Sciences}}}, + edition = {1}, + volume = {98}, + url = {https://link.springer.com/book/10.1007/978-3-642-84411-9}, + urldate = {2022-06-18}, + isbn = {978-3-642-84413-3}, + langid = {english}, + keywords = {\_tablet,condensed matter,defects,DFT,magnetism}, + file = {/Users/wasmer/Nextcloud/Zotero/Magnetism and the Electronic Structure of Crystals.pdf;/Users/wasmer/Zotero/storage/QVJRNHRA/978-3-642-84411-9.html} +} + @book{guidrySymmetryBrokenSymmetry2022, title = {Symmetry, {{Broken Symmetry}}, and {{Topology}} in {{Modern Physics}}: {{A First Course}}}, shorttitle = {Symmetry, {{Broken Symmetry}}, and {{Topology}} in {{Modern Physics}}}, author = {Guidry, Mike and Sun, Yang}, date = {2022-03-31}, edition = {1}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, doi = {10.1017/9781009000949}, url = {https://www.cambridge.org/highereducation/books/symmetry-broken-symmetry-and-topology-in-modern-physics/794C53F5AFDB06E0EE6F4310C8346DFD}, urldate = {2023-09-30}, @@ -5897,6 +6583,25 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Gutmann_2022_Pen and Paper Exercises in Machine Learning.pdf;/Users/wasmer/Zotero/storage/KMSFX6RY/2206.html} } +@article{gygiAllElectronPlaneWaveElectronic2023, + title = {All-{{Electron Plane-Wave Electronic Structure Calculations}}}, + author = {Gygi, François}, + date = {2023-02-28}, + journaltitle = {Journal of Chemical Theory and Computation}, + shortjournal = {J. Chem. Theory Comput.}, + volume = {19}, + number = {4}, + pages = {1300--1309}, + publisher = {American Chemical Society}, + issn = {1549-9618}, + doi = {10.1021/acs.jctc.2c01191}, + url = {https://doi.org/10.1021/acs.jctc.2c01191}, + urldate = {2024-04-05}, + abstract = {We demonstrate the use of the plane wave basis for all-electron electronic structure calculations. The approach relies on the definition of an analytic, norm-conserving, regularized Coulomb potential, and a scalable implementation of the plane wave method capable of handling large energy cutoffs (up to 80 kRy in the examples shown). The method is applied to the computation of electronic properties of isolated atoms as well as the diamond and silicon crystals, MgO, solid argon, and a configuration of 64 water molecules extracted from a first-principles molecular dynamics simulation. 
The computed energies, band gaps, ionic forces, and stress tensors provide reference results for the validation of pseudopotentials and/or localized basis sets. A calculation of the all-electron band structure of diamond and silicon using the SCAN meta-GGA density functional allows for a validation of calculations based on pseudopotentials derived using the PBE exchange-correlation functional. In the case of (H2O)64, the computed ionic forces provide a reference from which the errors incurred in pseudopotential calculations and in localized Gaussian basis sets calculations can be estimated.}, + keywords = {all-electron,core electrons,DFT,DFT numerics,FLAPW,physics,plane-wave,pseudopotential,review,review-of-DFT}, + file = {/Users/wasmer/Nextcloud/Zotero/Gygi_2023_All-Electron Plane-Wave Electronic Structure Calculations.pdf} +} + @article{gyorffyCoherentPotentialApproximationNonoverlappingMuffinTinPotential1972, title = {Coherent-{{Potential Approximation}} for a {{Nonoverlapping-Muffin-Tin-Potential Model}} of {{Random Substitutional Alloys}}}, author = {Gyorffy, B. L.}, @@ -5906,7 +6611,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {6}, pages = {2382--2384}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.5.2382}, url = {https://link.aps.org/doi/10.1103/PhysRevB.5.2382}, urldate = {2023-09-19}, @@ -5922,7 +6627,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {2}, pages = {327--338}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/D2DD00113F}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d2dd00113f}, urldate = {2023-08-19}, @@ -5939,7 +6644,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {4}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-018-0120-9}, url = {https://www.nature.com/articles/s41524-018-0120-9}, @@ -6048,7 +6753,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {7}, pages = {1600}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, issn = {1996-1944}, doi = {10.3390/ma13071600}, url = {https://www.mdpi.com/1996-1944/13/7/1600}, @@ -6070,7 +6775,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {82}, number = {4}, pages = {3045--3067}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/RevModPhys.82.3045}, url = {https://link.aps.org/doi/10.1103/RevModPhys.82.3045}, urldate = {2023-06-15}, @@ -6087,7 +6792,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {372}, number = {2011}, pages = {20130270}, - publisher = {{Royal Society}}, + publisher = {Royal Society}, doi = {10.1098/rsta.2013.0270}, url = {https://royalsocietypublishing.org/doi/10.1098/rsta.2013.0270}, urldate = {2022-05-18}, @@ -6096,6 +6801,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Hasnip et al_2014_Density functional theory in the solid state.pdf} } +@online{hazraPredictingOneParticleDensity2024, + title = {Predicting {{The One-Particle Density Matrix With Machine Learning}}}, + author = {Hazra, S. and Patil, U. 
and Sanvito, S.}, + date = {2024-01-12}, + eprint = {2401.06533}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2401.06533}, + url = {http://arxiv.org/abs/2401.06533}, + urldate = {2024-03-08}, + abstract = {Two of the most widely used electronic structure theory methods, namely Hartree-Fock and Kohn-Sham density functional theory, both requires the iterative solution of a set of Schr\textbackslash "odinger-like equations. The speed of convergence of such self-consistent field process depends on the complexity of the system under investigation and on the initial guess for the density matrix. An initial density matrix close to the ground-state one will effectively allow one to cut out many of the self-consistent steps necessary to achieve convergence. Here, we predict the density matrix of Kohn-Sham density functional theory by constructing a neural network, which uses the atomic positions as only information. Such neural network provides an initial guess for the density matrix far superior to any other recipes available, in particular for molecules with metallic bonds. Furthermore, the quality of such neural-network density matrix is good enough for the evaluation of interatomic forces. This allows us to run accelerated \{\textbackslash it ab-initio\} molecular dynamics with little to no self-consistent steps.}, + pubstate = {preprint}, + keywords = {\_tablet,alternative approaches,alternative for ML-DFT,AML,BLYP,charge density,density,density matrix,DFT,DFT speedup,DFT speedup with ML,DIIS,HFT,hybrid AI/simulation,initial guess,invariance,MD,ML,ML-Density,ML-DFT,ML-ESM,ML-WFT,molecules,not spin-dependent,prediction of density matrix,prediction of electron density,PySCF,RDMFT,SCF,surrogate model}, + file = {/Users/wasmer/Nextcloud/Zotero/Hazra et al_2024_Predicting The One-Particle Density Matrix With Machine Learning.pdf;/Users/wasmer/Zotero/storage/LVFIN4XI/2401.html} +} + @article{hegdeMachinelearnedApproximationsDensity2017, title = {Machine-Learned Approximations to {{Density Functional Theory Hamiltonians}}}, author = {Hegde, Ganesh and Bowen, R. Chris}, @@ -6105,7 +6826,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {42669}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2045-2322}, doi = {10.1038/srep42669}, url = {https://www.nature.com/articles/srep42669}, @@ -6128,7 +6849,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, doi = {10.48550/arXiv.2007.01988}, url = {http://arxiv.org/abs/2007.01988}, urldate = {2023-08-19}, - abstract = {A central challenge in high throughput density functional theory (HT-DFT) calculations is selecting a combination of input parameters and post-processing techniques that can be used across all materials classes, while also managing accuracy-cost tradeoffs. To investigate the effects of these parameter choices, we consolidate three large HT-DFT databases: Automatic-FLOW (AFLOW), the Materials Project (MP), and the Open Quantum Materials Database (OQMD), and compare reported properties across each pair of databases for materials calculated using the same initial crystal structure. We find that HT-DFT formation energies and volumes are generally more reproducible than band gaps and total magnetizations; for instance, a notable fraction of records disagree on whether a material is metallic (up to 7\%) or magnetic (up to 15\%). 
The variance between calculated properties is as high as 0.105 eV/atom (median relative absolute difference, or MRAD, of 6\%) for formation energy, 0.65 \{\textbackslash AA\}\$\^3\$/atom (MRAD of 4\%) for volume, 0.21 eV (MRAD of 9\%) for band gap, and 0.15 \$\textbackslash mu\_\{\textbackslash rm B\}\$/formula unit (MRAD of 8\%) for total magnetization, comparable to the differences between DFT and experiment. We trace some of the larger discrepancies to choices involving pseudopotentials, the DFT+U formalism, and elemental reference states, and argue that further standardization of HT-DFT would be beneficial to reproducibility.}, + abstract = {A central challenge in high throughput density functional theory (HT-DFT) calculations is selecting a combination of input parameters and post-processing techniques that can be used across all materials classes, while also managing accuracy-cost tradeoffs. To investigate the effects of these parameter choices, we consolidate three large HT-DFT databases: Automatic-FLOW (AFLOW), the Materials Project (MP), and the Open Quantum Materials Database (OQMD), and compare reported properties across each pair of databases for materials calculated using the same initial crystal structure. We find that HT-DFT formation energies and volumes are generally more reproducible than band gaps and total magnetizations; for instance, a notable fraction of records disagree on whether a material is metallic (up to 7\%) or magnetic (up to 15\%). The variance between calculated properties is as high as 0.105 eV/atom (median relative absolute difference, or MRAD, of 6\%) for formation energy, 0.65 \{\textbackslash AA\}\$\textasciicircum 3\$/atom (MRAD of 4\%) for volume, 0.21 eV (MRAD of 9\%) for band gap, and 0.15 \$\textbackslash mu\_\{\textbackslash rm B\}\$/formula unit (MRAD of 8\%) for total magnetization, comparable to the differences between DFT and experiment. 
We trace some of the larger discrepancies to choices involving pseudopotentials, the DFT+U formalism, and elemental reference states, and argue that further standardization of HT-DFT would be beneficial to reproducibility.}, pubstate = {preprint}, keywords = {AFLOWLIB,Citrine Informatics,DFT,magnetization,materials database,materials project,OQMD,reproducibility,todo-tagging,uncertainty quantification}, file = {/Users/wasmer/Nextcloud/Zotero/Hegde et al_2022_Quantifying uncertainty in high-throughput density functional theory.pdf;/Users/wasmer/Zotero/storage/CCU4ZPKZ/2007.html} @@ -6144,7 +6865,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {5}, pages = {053805}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.7.053805}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.7.053805}, urldate = {2023-10-08}, @@ -6161,7 +6882,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {4}, pages = {045021}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/aba9ef}, url = {https://doi.org/10.1088/2632-2153/aba9ef}, @@ -6181,7 +6902,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {109}, number = {7}, pages = {076801}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.109.076801}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.109.076801}, urldate = {2022-05-13}, @@ -6194,7 +6915,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Benchmarking and {{Optimization}} of {{UF}}{\textsuperscript{3}} {{Machine Learning Potential}} on {{Solids}}}, booktitle = {Bulletin of the {{American Physical Society}}}, author = {Hennig, Richard G.}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR23/Session/N53.7}, urldate = {2023-05-06}, eventtitle = {{{APS March Meeting}} 2023}, @@ -6206,7 +6927,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Investigation of Transition Metal Complex Representations for Machine Learning Structure-Property Relationships}, booktitle = {Bulletin of the {{American Physical Society}}}, author = {Hennig, Richard G.}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR23/Session/T00.315}, urldate = {2023-05-06}, abstract = {Molecular magnets have potential applications in quantum computing, spintronics, and sensor development. These molecules display spin anisotropy below their characteristic blocking temperature. Contenders for single molecular magnets are monometallic transition metal complexes. Modeling of these complexes demand high computational cost and is difficult due to strong coupling effects. We investigate the performance of crystal graph neural networks (CGNN) for the prediction of properties using a dataset containing nearly 87,000 transition metal complexes. These properties have been calculated using the TPSSh-D3BJ exchange-correlation functional. Here, we see if the CGNN can predict the HOMO/LUMO gap, metal ion charge, and a variety of other computed energies. We then compare the model performance of the CGNN against neural networks trained with structural descriptor representations, such as the smooth overlap of atomic positions (SOAP). A completed model can be used to filter complexes in a high throughput screening. 
This work provides the first steps in the development of a machine-learning model for the property prediction of transition metal complexes for single molecular magnet applications. *This work is funded by the DOE}, @@ -6219,7 +6940,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Machine Learning and {{Monte Carlo}} Simulations of the {{Gibbs}} Free Energy of the {{Fe-C}} System in a Magnetic Field}, booktitle = {Bulletin of the {{American Physical Society}}}, author = {Hennig, Richard G.}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR23/Session/D44.2}, urldate = {2023-05-06}, abstract = {To model the thermodynamics and kinetics of steels in high magnetic fields requires knowledge of the magnetic Gibbs free energy, G, which involves millions of energy evaluations for the potential energy landscapes as a function of the applied field. Density-functional theory (DFT) calculations provide sufficient accuracy but are computationally very demanding. To overcome this barrier, we apply the ultra-fast force field (UF3) machine learning model [1] to approximate the DFT energy landscape. A DFT database is assembled through VASP, focusing on the energies and forces as a function of magnetic field for bcc and fcc Fe(C) with different structural and magnetic configurations. The UF3 models are trained and validated on this database to quickly evaluate the energies of ensembles. The resulting UF3 models are then utilized in the subsequent Monte Carlo simulations. Thermodynamic integration is utilized to combine the simulations at different temperatures to achieve the magnetic G models for the two Fe(C) phases as a function of temperature, atomic fraction of carbon, and magnetic field. Our calculations show that the applied magnetic field of around 10 T results in a change in the transition temperature of tens of kelvins. [1] S. R. Xie et al, arXiv:2110.00624 (2021).}, @@ -6355,7 +7076,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {10}, pages = {891--897}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1755-4349}, doi = {10.1038/s41557-020-0544-y}, url = {https://www.nature.com/articles/s41557-020-0544-y}, @@ -6377,7 +7098,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {151}, number = {8}, pages = {084103}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5108803}, url = {https://aip.scitation.org/doi/full/10.1063/1.5108803}, @@ -6424,17 +7145,56 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/He et al_2019_Topological insulator.pdf} } -@article{hicksAFLOWSYMPlatformComplete2018, - title = {{{AFLOW-SYM}}: Platform for the Complete, Automatic and Self-Consistent Symmetry Analysis of Crystals}, - shorttitle = {{{AFLOW-SYM}}}, - author = {Hicks, D. and Oses, C. and Gossett, E. and Gomez, G. and Taylor, R. H. and Toher, C. and Mehl, M. J. and Levy, O. and Curtarolo, S.}, +@article{hibat-allahInvestigatingTopologicalOrder2023, + title = {Investigating {{Topological Order}} Using {{Recurrent Neural Networks}}}, + author = {Hibat-Allah, Mohamed and Melko, Roger G. and Carrasquilla, Juan}, + date = {2023-08-22}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. 
B}, + volume = {108}, + number = {7}, + eprint = {2303.11207}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics, physics:quant-ph}, + pages = {075152}, + issn = {2469-9950, 2469-9969}, + doi = {10.1103/PhysRevB.108.075152}, + url = {http://arxiv.org/abs/2303.11207}, + urldate = {2024-02-28}, + abstract = {Recurrent neural networks (RNNs), originally developed for natural language processing, hold great promise for accurately describing strongly correlated quantum many-body systems. Here, we employ 2D RNNs to investigate two prototypical quantum many-body Hamiltonians exhibiting topological order. Specifically, we demonstrate that RNN wave functions can effectively capture the topological order of the toric code and a Bose-Hubbard spin liquid on the kagome lattice by estimating their topological entanglement entropies. We also find that RNNs favor coherent superpositions of minimally-entangled states over minimally-entangled states themselves. Overall, our findings demonstrate that RNN wave functions constitute a powerful tool to study phases of matter beyond Landau's symmetry-breaking paradigm.}, + keywords = {/unread,ML,todo-tagging,topological}, + file = {/Users/wasmer/Nextcloud/Zotero/Hibat-Allah et al_2023_Investigating Topological Order using Recurrent Neural Networks.pdf;/Users/wasmer/Zotero/storage/CJG7Q2MQ/2303.html} +} + +@article{hibat-allahRecurrentNeuralNetwork2020, + title = {Recurrent Neural Network Wave Functions}, + author = {Hibat-Allah, Mohamed and Ganahl, Martin and Hayward, Lauren E. and Melko, Roger G. and Carrasquilla, Juan}, + date = {2020-06-17}, + journaltitle = {Physical Review Research}, + shortjournal = {Phys. Rev. Res.}, + volume = {2}, + number = {2}, + pages = {023358}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevResearch.2.023358}, + url = {https://link.aps.org/doi/10.1103/PhysRevResearch.2.023358}, + urldate = {2024-02-28}, + abstract = {A core technology that has emerged from the artificial intelligence revolution is the recurrent neural network (RNN). Its unique sequence-based architecture provides a tractable likelihood estimate with stable training paradigms, a combination that has precipitated many spectacular advances in natural language processing and neural machine translation. This architecture also makes a good candidate for a variational wave function, where the RNN parameters are tuned to learn the approximate ground state of a quantum Hamiltonian. In this paper, we demonstrate the ability of RNNs to represent several many-body wave functions, optimizing the variational parameters using a stochastic approach. Among other attractive features of these variational wave functions, their autoregressive nature allows for the efficient calculation of physical estimators by providing independent samples. We demonstrate the effectiveness of RNN wave functions by calculating ground-state energies, correlation functions, and entanglement entropies for several quantum spin models of interest to condensed-matter physicists in one and two spatial dimensions.}, + keywords = {ML,ML-QMBP,prediction of wavefunction,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Hibat-Allah et al_2020_Recurrent neural network wave functions.pdf;/Users/wasmer/Zotero/storage/YUMK3X2S/PhysRevResearch.2.html} +} + +@article{hicksAFLOWSYMPlatformComplete2018, + title = {{{AFLOW-SYM}}: Platform for the Complete, Automatic and Self-Consistent Symmetry Analysis of Crystals}, + shorttitle = {{{AFLOW-SYM}}}, + author = {Hicks, D. 
and Oses, C. and Gossett, E. and Gomez, G. and Taylor, R. H. and Toher, C. and Mehl, M. J. and Levy, O. and Curtarolo, S.}, date = {2018-05-01}, journaltitle = {Acta Crystallographica Section A: Foundations and Advances}, shortjournal = {Acta Cryst A}, volume = {74}, number = {3}, pages = {184--203}, - publisher = {{International Union of Crystallography}}, + publisher = {International Union of Crystallography}, issn = {2053-2733}, doi = {10.1107/S2053273318003066}, url = {https://scripts.iucr.org/cgi-bin/paper?ae5042}, @@ -6467,7 +7227,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Materials {{Informatics}} - {{Augmenting Materials Research}} with {{Data-driven Design}} and {{Machine Learning}}}, author = {Himanen, Lauri}, date = {2020}, - publisher = {{Aalto University}}, + publisher = {Aalto University}, issn = {1799-4942 (electronic)}, url = {https://aaltodoc.aalto.fi:443/handle/123456789/43027}, urldate = {2021-05-13}, @@ -6505,8 +7265,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023-09-10}, series = {{{PEARC}} '23}, pages = {448--449}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/3569951.3597581}, url = {https://dl.acm.org/doi/10.1145/3569951.3597581}, urldate = {2023-09-21}, @@ -6540,7 +7300,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {https://zenodo.org/record/3609779}, urldate = {2023-04-27}, abstract = {We provide here small Maple scripts for the calculation of Hubbard matrices and their subsequent downfolding by Loewdin's partitioning}, - organization = {{Zenodo}}, + langid = {english}, + organization = {Zenodo}, keywords = {/unread,downfolding,Hubbard model,Loewdin's partitioning}, file = {/Users/wasmer/Zotero/storage/62QDRCHY/3609779.html} } @@ -6586,7 +7347,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {148}, number = {24}, pages = {241743}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5025668}, url = {https://aip.scitation.org/doi/full/10.1063/1.5025668}, @@ -6600,7 +7361,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Physics-Based {{Deep Learning}}}, author = {Holl, Philipp and Mueller, Maximilian and Schnell, Patrick and Trost, Felix and Thuerey, Nils and Um, Kiwon}, date = {2021}, - publisher = {{WWW}}, + publisher = {WWW}, url = {https://physicsbaseddeeplearning.org}, urldate = {2022-10-02}, abstract = {Welcome to the Physics-based Deep Learning Book (v0.2) 👋 TL;DR: This document contains a practical and comprehensive introduction of everything related to deep learning in the context of physical simulations. As much as possible, all topics come with hands-on code examples in the form of Jupyter notebooks to quickly get started. Beyond standard supervised learning from data, we’ll look at physical loss constraints, more tightly coupled learning algorithms with differentiable simulations, training algorithms tailored to physics problems, as well as reinforcement learning and uncertainty modeling. 
We live in exciting times: these methods have a huge potential to fundamentally change what computer simulations can achieve.}, @@ -6642,7 +7403,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {https://doi.org/10.1063/5.0012407}, urldate = {2023-05-06}, abstract = {We present a machine learning approach for accurately predicting formation energies of binary compounds in the context of crystal structure predictions. The success of any machine learning model depends significantly on the choice of representation used to encode the relevant physical information into machine-learnable data. We test different representation schemes based on partial radial and angular distribution functions (RDF+ADF) on Al–Ni and Cd–Te structures generated using our genetic algorithm for structure prediction. We observe a remarkable improvement in predictive accuracy upon transitioning from global to atom-centered representations, resulting in a threefold decrease in prediction errors. We show that a support vector regression model using a combination of atomic radial and angular distribution functions performs best at the formation energy prediction task, providing small root mean squared errors of 3.9\,meV/atom and 10.9\,meV/atom for Al–Ni and Cd–Te, respectively. We test the performance of our models against common traditional descriptors and find that RDF- and ADF-based representations significantly outperform many of those in the prediction of formation energies. The high accuracy of predictions makes our machine learning models great candidates for the exploration of energy landscapes.}, - keywords = {\_tablet,/unread,ACDC,ACSF,ADF descriptor,AML,benchmarking,binary systems,CFID,Coulomb matrix,crystal structure prediction,descriptor comparison,descriptors,LAMMPS,MBTR,MD,ML,OFM descriptor,PES,prediction of energy,prediction of formation energy,RDF descriptor,SB descriptors,SOAP,structure prediction}, + keywords = {/unread,\_tablet,ACDC,ACSF,ADF descriptor,AML,benchmarking,binary systems,CFID,Coulomb matrix,crystal structure prediction,descriptor comparison,descriptors,LAMMPS,MBTR,MD,ML,OFM descriptor,PES,prediction of energy,prediction of formation energy,RDF descriptor,SB descriptors,SOAP,structure prediction}, file = {/Users/wasmer/Nextcloud/Zotero/Honrao et al_2020_Augmenting machine learning of energy landscapes with local structural.pdf;/Users/wasmer/Zotero/storage/VFKSDW8H/Augmenting-machine-learning-of-energy-landscapes.html} } @@ -6663,6 +7424,21 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Hoogeboom et al_2022_Equivariant Diffusion for Molecule Generation in 3D.pdf;/Users/wasmer/Zotero/storage/K6KWYTSV/2203.html} } +@inproceedings{horschEuropeanStandardizationEfforts2023, + title = {European Standardization Efforts from {{FAIR}} toward Explainable-{{AI-ready}} Data Documentation in Materials Modelling}, + booktitle = {2023 3rd {{International Conference}} on {{Applied Artificial Intelligence}} ({{ICAPAI}})}, + author = {Horsch, Martin Thomas and Schembera, Björn and Preisig, Heinz A.}, + date = {2023-05}, + pages = {1--6}, + doi = {10.1109/ICAPAI58366.2023.10193944}, + url = {https://ieeexplore.ieee.org/document/10193944}, + urldate = {2024-04-05}, + abstract = {Security critical AI applications require a standardized and interoperable data and metadata documentation that makes the source data explainable-AI ready (XAIR). 
Within the domain of materials modelling and characterization, European initiatives have proposed a series of metadata standards and procedural recommendations that were accepted as CEN workshop agreements (CWAs): CWA 17284 MODA, CWA 17815 CHADA, and CWA 17960 ModGra. It is discussed how these standards have been ontologized, and gaps are identified as regards the epistemic grounding metadata, i.e., an annotation of data and claims by something that substantiates whether, why, and to what extent they are indeed knowledge and can be relied upon.}, + eventtitle = {2023 3rd {{International Conference}} on {{Applied Artificial Intelligence}} ({{ICAPAI}})}, + keywords = {AI,AiiDA,computational science,CWA,EMMC,EMMO,FAIR,IT security,materials,materials and molecular modeling,metadata,ML,ontology,provenance,RDM,RSE,simulation,standardization,XAIR}, + file = {/Users/wasmer/Nextcloud/Zotero/Horsch et al_2023_European standardization efforts from FAIR toward explainable-AI-ready data.pdf;/Users/wasmer/Zotero/storage/ETXXB8B6/10193944.html} +} + @article{huAisNetUniversalInteratomic2023, title = {{{AisNet}}: {{A Universal Interatomic Potential Neural Network}} with {{Encoded Local Environment Features}}}, shorttitle = {{{AisNet}}}, @@ -6673,7 +7449,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {63}, number = {6}, pages = {1756--1765}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9596}, doi = {10.1021/acs.jcim.3c00077}, url = {https://doi.org/10.1021/acs.jcim.3c00077}, @@ -6690,7 +7466,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {381}, number = {6654}, pages = {170--175}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.abn3445}, url = {https://www.science.org/doi/10.1126/science.abn3445}, urldate = {2023-07-14}, @@ -6726,7 +7502,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-7132}, doi = {10.1038/s41699-023-00369-1}, url = {https://www.nature.com/articles/s41699-023-00369-1}, @@ -6747,7 +7523,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {300}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2052-4463}, doi = {10.1038/s41597-020-00638-4}, url = {https://www.nature.com/articles/s41597-020-00638-4}, @@ -6756,7 +7532,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {1}, langid = {english}, keywords = {AiiDA,original publication}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational methods;Research management Subject\_term\_id: computational-methods;research-management}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Computational methods;Research management\\ +Subject\_term\_id: computational-methods;research-management}, file = {/Users/wasmer/Nextcloud/Zotero/Huber et al_2020_AiiDA 1.pdf;/Users/wasmer/Zotero/storage/SQ25VE8T/s41597-020-00638-4.html} } @@ -6783,7 +7564,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {1--12}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = 
{10.1038/s41524-021-00594-6}, url = {https://www.nature.com/articles/s41524-021-00594-6}, @@ -6915,8 +7696,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2015-05-27}, series = {{{SIGMOD}} '15}, pages = {277--281}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/2723372.2731084}, url = {https://doi.org/10.1145/2723372.2731084}, urldate = {2022-10-02}, @@ -6935,7 +7716,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {148}, number = {24}, pages = {241730}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5024611}, url = {https://aip.scitation.org/doi/10.1063/1.5024611}, @@ -6963,7 +7744,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {7}, pages = {418--419}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2662-8457}, doi = {10.1038/s43588-022-00270-9}, url = {https://www.nature.com/articles/s43588-022-00270-9}, @@ -6980,7 +7761,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Challenges in {{Machine Learning}} for {{Materials}} - {{AI White Paper}}}, author = {Informatics, Citrine}, date = {2021-04}, - institution = {{Citrine Informatics}}, + institution = {Citrine Informatics}, url = {https://citrine.io/success/white-papers/white-paper-challenges-in-machine-learning-for-materials/}, urldate = {2023-08-19}, abstract = {Learn about challenges in Machine Learning for Materials. See how Citrine has overcome these challenges and why off-the-shelf open-source AI will require a lot of tailoring to make it work in this space.}, @@ -6998,7 +7779,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {14}, number = {20}, pages = {5438--5452}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2041-6539}, doi = {10.1039/D2SC04815A}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/sc/d2sc04815a}, @@ -7012,13 +7793,13 @@ Junqi Yin (Oak Ridge National Laboratory)}, @book{inuiGroupTheoryIts1990, title = {Group {{Theory}} and {{Its Applications}} in {{Physics}}}, author = {Inui, Teturo and Tanabe, Yukito and Onodera, Yositaka}, - editorb = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Lotsch, Helmut K. V.}, - editorbtype = {redactor}, + editor = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Lotsch, Helmut K. 
V.}, + editortype = {redactor}, date = {1990}, series = {Springer {{Series}} in {{Solid-State Sciences}}}, volume = {78}, - publisher = {{Springer}}, - location = {{Berlin, Heidelberg}}, + publisher = {Springer}, + location = {Berlin, Heidelberg}, doi = {10.1007/978-3-642-80021-4}, url = {http://link.springer.com/10.1007/978-3-642-80021-4}, urldate = {2023-09-20}, @@ -7048,6 +7829,49 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Ismail-Beigi_Arias_2000_New Algebraic Formulation of Density Functional Calculation.pdf;/Users/wasmer/Zotero/storage/JRMEXIH4/9909130.html} } +@article{jablonka14ExamplesHow2023, + title = {14 Examples of How {{LLMs}} Can Transform Materials Science and Chemistry: A Reflection on a Large Language Model Hackathon}, + shorttitle = {14 Examples of How {{LLMs}} Can Transform Materials Science and Chemistry}, + author = {Jablonka, Kevin Maik and Ai, Qianxiang and Al-Feghali, Alexander and Badhwar, Shruti and Bocarsly, Joshua D. and Bran, Andres M. and Bringuier, Stefan and Brinson, L. Catherine and Choudhary, Kamal and Circi, Defne and Cox, Sam and family=Jong, given=Wibe A., prefix=de, useprefix=false and Evans, Matthew L. and Gastellu, Nicolas and Genzling, Jerome and Gil, MarÃa Victoria and Gupta, Ankur K. and Hong, Zhi and Imran, Alishba and Kruschwitz, Sabine and Labarre, Anne and Lála, Jakub and Liu, Tao and Ma, Steven and Majumdar, Sauradeep and Merz, Garrett W. and Moitessier, Nicolas and Moubarak, Elias and Mouriño, Beatriz and Pelkie, Brenden and Pieler, Michael and Ramos, Mayk Caldas and Ranković, Bojana and Rodriques, Samuel G. and Sanders, Jacob N. and Schwaller, Philippe and Schwarting, Marcus and Shi, Jiale and Smit, Berend and Smith, Ben E. and Herck, Joren Van and Völker, Christoph and Ward, Logan and Warren, Sean and Weiser, Benjamin and Zhang, Sylvester and Zhang, Xiaoqi and Zia, Ghezal Ahmad and Scourtas, Aristana and Schmidt, K. J. and Foster, Ian and White, Andrew D. and Blaiszik, Ben}, + date = {2023-10-09}, + journaltitle = {Digital Discovery}, + shortjournal = {Digital Discovery}, + volume = {2}, + number = {5}, + pages = {1233--1250}, + publisher = {RSC}, + issn = {2635-098X}, + doi = {10.1039/D3DD00113J}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d3dd00113j}, + urldate = {2024-03-08}, + abstract = {Large-language models (LLMs) such as GPT-4 caught the interest of many scientists. Recent studies suggested that these models could be useful in chemistry and materials science. To explore these possibilities, we organized a hackathon. This article chronicles the projects built as part of this hackathon. Participants employed LLMs for various applications, including predicting properties of molecules and materials, designing novel interfaces for tools, extracting knowledge from unstructured data, and developing new educational applications. The diverse topics and the fact that working prototypes could be generated in less than two days highlight that LLMs will profoundly impact the future of our fields. 
The rich collection of ideas and projects also indicates that the applications of LLMs are not limited to materials science and chemistry but offer potential benefits to a wide range of scientific disciplines.}, + langid = {english}, + keywords = {/unread,AML,chemistry,GPT,GPT-4,hackathon,language models,library,LLM,materials,ML,transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Jablonka et al_2023_14 examples of how LLMs can transform materials science and chemistry.pdf;/Users/wasmer/Zotero/storage/FF8ZNRTK/Jablonka et al. - 2023 - 14 examples of how LLMs can transform materials sc.pdf} +} + +@article{jablonka14ExamplesHow2023a, + title = {14 {{Examples}} of {{How LLMs Can Transform Materials Science}} and {{Chemistry}}: {{A Reflection}} on a {{Large Language Model Hackathon}}}, + shorttitle = {14 {{Examples}} of {{How LLMs Can Transform Materials Science}} and {{Chemistry}}}, + author = {Jablonka, Kevin Maik and Ai, Qianxiang and Al-Feghali, Alexander and Badhwar, Shruti and Bocarsly, Joshua D. and Bran, Andres M. and Bringuier, Stefan and Brinson, L. Catherine and Choudhary, Kamal and Circi, Defne and Cox, Sam and family=Jong, given=Wibe A., prefix=de, useprefix=true and Evans, Matthew L. and Gastellu, Nicolas and Genzling, Jerome and Gil, MarÃa Victoria and Gupta, Ankur K. and Hong, Zhi and Imran, Alishba and Kruschwitz, Sabine and Labarre, Anne and Lála, Jakub and Liu, Tao and Ma, Steven and Majumdar, Sauradeep and Merz, Garrett W. and Moitessier, Nicolas and Moubarak, Elias and Mouriño, Beatriz and Pelkie, Brenden and Pieler, Michael and Ramos, Mayk Caldas and Ranković, Bojana and Rodriques, Samuel G. and Sanders, Jacob N. and Schwaller, Philippe and Schwarting, Marcus and Shi, Jiale and Smit, Berend and Smith, Ben E. and Van Herck, Joren and Völker, Christoph and Ward, Logan and Warren, Sean and Weiser, Benjamin and Zhang, Sylvester and Zhang, Xiaoqi and Zia, Ghezal Ahmad and Scourtas, Aristana and Schmidt, K. J. and Foster, Ian and White, Andrew D. and Blaiszik, Ben}, + date = {2023-07-14}, + journaltitle = {Digital Discovery}, + shortjournal = {Digital Discovery}, + volume = {2}, + number = {5}, + eprint = {2306.06283}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + pages = {1233--1250}, + issn = {2635-098X}, + doi = {10.1039/D3DD00113J}, + url = {http://arxiv.org/abs/2306.06283}, + urldate = {2024-03-08}, + abstract = {Large-language models (LLMs) such as GPT-4 caught the interest of many scientists. Recent studies suggested that these models could be useful in chemistry and materials science. To explore these possibilities, we organized a hackathon. This article chronicles the projects built as part of this hackathon. Participants employed LLMs for various applications, including predicting properties of molecules and materials, designing novel interfaces for tools, extracting knowledge from unstructured data, and developing new educational applications. The diverse topics and the fact that working prototypes could be generated in less than two days highlight that LLMs will profoundly impact the future of our fields. 
The rich collection of ideas and projects also indicates that the applications of LLMs are not limited to materials science and chemistry but offer potential benefits to a wide range of scientific disciplines.}, + keywords = {/unread,AML,chemistry,GPT,GPT-4,hackathon,language models,library,LLM,materials,ML,transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Jablonka et al_2023_14 Examples of How LLMs Can Transform Materials Science and Chemistry2.pdf;/Users/wasmer/Zotero/storage/9QTXVXSW/2306.html} +} + @article{jablonkaBigDataSciencePorous2020, title = {Big-{{Data Science}} in {{Porous Materials}}: {{Materials Genomics}} and {{Machine Learning}}}, shorttitle = {Big-{{Data Science}} in {{Porous Materials}}}, @@ -7058,7 +7882,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {120}, number = {16}, pages = {8066--8129}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00004}, url = {https://doi.org/10.1021/acs.chemrev.0c00004}, @@ -7102,6 +7926,27 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Jacobs et al_2020_The Materials Simulation Toolkit for Machine learning (MAST-ML).pdf;/Users/wasmer/Zotero/storage/5R5YHQE4/S0927025620300355.html} } +@article{jacobssonOpenaccessDatabaseAnalysis2022, + title = {An Open-Access Database and Analysis Tool for Perovskite Solar Cells Based on the {{FAIR}} Data Principles}, + author = {Jacobsson, T. Jesper and Hultqvist, Adam and GarcÃa-Fernández, Alberto and Anand, Aman and Al-Ashouri, Amran and Hagfeldt, Anders and Crovetto, Andrea and Abate, Antonio and Ricciardulli, Antonio Gaetano and Vijayan, Anuja and Kulkarni, Ashish and Anderson, Assaf Y. and Darwich, Barbara Primera and Yang, Bowen and Coles, Brendan L. and Perini, Carlo A. R. and Rehermann, Carolin and Ramirez, Daniel and Fairen-Jimenez, David and Di Girolamo, Diego and Jia, Donglin and Avila, Elena and Juarez-Perez, Emilio J. and Baumann, Fanny and Mathies, Florian and González, G. S. Anaya and Boschloo, Gerrit and Nasti, Giuseppe and Paramasivam, Gopinath and MartÃnez-Denegri, Guillermo and Näsström, Hampus and Michaels, Hannes and Köbler, Hans and Wu, Hua and Benesperi, Iacopo and Dar, M. Ibrahim and Bayrak Pehlivan, Ilknur and Gould, Isaac E. and Vagott, Jacob N. and Dagar, Janardan and Kettle, Jeff and Yang, Jie and Li, Jinzhao and Smith, Joel A. and Pascual, Jorge and Jerónimo-Rendón, Jose J. and Montoya, Juan Felipe and Correa-Baena, Juan-Pablo and Qiu, Junming and Wang, Junxin and Sveinbjörnsson, Kári and Hirselandt, Katrin and Dey, Krishanu and Frohna, Kyle and Mathies, Lena and Castriotta, Luigi A. and Aldamasy, Mahmoud H. and Vasquez-Montoya, Manuel and Ruiz-Preciado, Marco A. and Flatken, Marion A. and Khenkin, Mark V. and Grischek, Max and Kedia, Mayank and Saliba, Michael and Anaya, Miguel and Veldhoen, Misha and Arora, Neha and Shargaieva, Oleksandra and Maus, Oliver and Game, Onkar S. and Yudilevich, Ori and Fassl, Paul and Zhou, Qisen and Betancur, Rafael and Munir, Rahim and Patidar, Rahul and Stranks, Samuel D. and Alam, Shahidul and Kar, Shaoni and Unold, Thomas and Abzieher, Tobias and Edvinsson, Tomas and David, Tudur Wyn and Paetzold, Ulrich W. and Zia, Waqas and Fu, Weifei and Zuo, Weiwei and Schröder, Vincent R. F. 
and Tress, Wolfgang and Zhang, Xiaoliang and Chiang, Yu-Hsien and Iqbal, Zafar and Xie, Zhiqiang and Unger, Eva}, + date = {2022-01}, + journaltitle = {Nature Energy}, + shortjournal = {Nat Energy}, + volume = {7}, + number = {1}, + pages = {107--115}, + publisher = {Nature Publishing Group}, + issn = {2058-7546}, + doi = {10.1038/s41560-021-00941-3}, + url = {https://www.nature.com/articles/s41560-021-00941-3}, + urldate = {2024-01-02}, + abstract = {Large datasets are now ubiquitous as technology enables higher-throughput experiments, but rarely can a research field truly benefit from the research data generated due to inconsistent formatting, undocumented storage or improper dissemination. Here we extract all the meaningful device data from peer-reviewed papers on metal-halide perovskite solar cells published so far and make them available in a database. We collect data from over 42,400 photovoltaic devices with up to 100 parameters per device. We then develop open-source and accessible procedures to analyse the data, providing examples of insights that can be gleaned from the analysis of a large dataset. The database, graphics and analysis tools are made available to the community and will continue to evolve as an open-source initiative. This approach of extensively capturing the progress of an entire field, including sorting, interactive exploration and graphical representation of the data, will be applicable to many fields in materials science, engineering and biosciences.}, + issue = {1}, + langid = {english}, + keywords = {/unread,database analysis,FAIR,materials,materials database,materials informatics,perovskites,RDM,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Jacobsson et al_2022_An open-access database and analysis tool for perovskite solar cells based on.pdf} +} + @article{jainCommentaryMaterialsProject2013, title = {Commentary: {{The Materials Project}}: {{A}} Materials Genome Approach to Accelerating Materials Innovation}, shorttitle = {Commentary}, @@ -7111,7 +7956,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {1}, pages = {011002}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, doi = {10.1063/1.4812323}, url = {https://aip.scitation.org/doi/10.1063%2F1.4812323}, urldate = {2021-10-15}, @@ -7237,7 +8082,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {615}, number = {7951}, pages = {221--222}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, doi = {10.1038/d41586-023-00599-9}, url = {https://www.nature.com/articles/d41586-023-00599-9}, urldate = {2023-03-10}, @@ -7245,7 +8090,9 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {7951}, langid = {english}, keywords = {/unread,superconductor}, - annotation = {Bandiera\_abtest: a Cg\_type: News And Views Subject\_term: Condensed-matter physics, Physics, Materials science}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: News And Views\\ +Subject\_term: Condensed-matter physics, Physics, Materials science}, file = {/Users/wasmer/Nextcloud/Zotero/Jin_Ceperley_2023_Hopes raised for room-temperature superconductivity, but doubts remain.pdf} } @@ -7258,7 +8105,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {122}, number = {22}, pages = {225701}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.122.225701}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.122.225701}, urldate = {2023-09-05}, @@ -7272,8 +8119,8 @@ 
Junqi Yin (Oak Ridge National Laboratory)}, shorttitle = {Numerical {{Python}}}, author = {Johansson, Robert}, date = {2019}, - publisher = {{Apress}}, - location = {{Berkeley, CA}}, + publisher = {Apress}, + location = {Berkeley, CA}, doi = {10.1007/978-1-4842-4246-9}, url = {http://link.springer.com/10.1007/978-1-4842-4246-9}, urldate = {2021-05-04}, @@ -7287,11 +8134,11 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Computational Nanoscience: Do It Yourself! : {{Winter School}}, 14-22 {{February}} 2006, {{Forschungszentrum Julich}}, {{Germany}} : Lecture Notes}, shorttitle = {Computational Nanoscience}, editor = {{John von Neumann-Institut für Computing} and {Winter School} and Grotendorst, Johannes and Blügel, Stefan and Marx, Dominik and {John von Neumann-Institut für Computing}}, - editora = {Mavropoulos, Phivos and Zeller, Rudolf and Lounis, Samir and Dederichs, Peter H.}, - editoratype = {collaborator}, + namea = {Mavropoulos, Phivos and Zeller, Rudolf and Lounis, Samir and Dederichs, Peter H.}, + nameatype = {collaborator}, date = {2006}, - publisher = {{John von Neumann Institut for Computing}}, - location = {{Julich, Germany}}, + publisher = {John von Neumann Institut for Computing}, + location = {Julich, Germany}, url = {http://hdl.handle.net/2128/2943}, isbn = {978-3-00-017350-9}, langid = {english}, @@ -7310,7 +8157,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {87}, number = {3}, pages = {897--923}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/RevModPhys.87.897}, url = {https://link.aps.org/doi/10.1103/RevModPhys.87.897}, urldate = {2023-06-30}, @@ -7345,7 +8192,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00863-y}, url = {https://www.nature.com/articles/s41524-022-00863-y}, @@ -7439,7 +8286,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {4}, pages = {044407}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.7.044407}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.7.044407}, urldate = {2023-06-01}, @@ -7457,7 +8304,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1884-4057}, doi = {10.1038/s41427-020-0211-1}, url = {https://www.nature.com/articles/s41427-020-0211-1}, @@ -7478,12 +8325,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {11}, pages = {2540--2547}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.2c00371}, url = {https://doi.org/10.1021/acs.jpclett.2c00371}, urldate = {2022-07-10}, abstract = {Kohn–Sham regularizer (KSR) is a differentiable machine learning approach to finding the exchange-correlation functional in Kohn–Sham density functional theory that works for strongly correlated systems. Here we test KSR for a weak correlation. We propose spin-adapted KSR (sKSR) with trainable local, semilocal, and nonlocal approximations found by minimizing density and total energy loss. We assess the atoms-to-molecules generalizability by training on one-dimensional (1D) H, He, Li, Be, and Be2+ and testing on 1D hydrogen chains, LiH, BeH2, and helium hydride complexes. 
The generalization error from our semilocal approximation is comparable to other differentiable approaches, but our nonlocal functional outperforms any existing machine learning functionals, predicting ground-state energies of test systems with a mean absolute error of 2.7 mH.}, - keywords = {DFT,Kohn-Sham regularizer,ML,ML-DFA,ML-DFT,ML-ESM,spin-dependent,spin-polarized}, + keywords = {DFT,JAX,Kohn-Sham regularizer,library,ML,ML-DFA,ML-DFT,ML-ESM,prediction from density,prediction of Exc,regularization,spin-dependent,spin-polarized,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Kalita et al_2022_How Well Does Kohn–Sham Regularizer Work for Weakly Correlated Systems.pdf;/Users/wasmer/Zotero/storage/TCWGCAZA/acs.jpclett.html} } @@ -7496,7 +8343,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {54}, number = {4}, pages = {818--826}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0001-4842}, doi = {10.1021/acs.accounts.0c00742}, url = {https://doi.org/10.1021/acs.accounts.0c00742}, @@ -7529,7 +8376,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {2}, pages = {025003}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ab5929}, url = {https://dx.doi.org/10.1088/2632-2153/ab5929}, @@ -7549,7 +8396,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {107}, number = {17}, pages = {174106}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.174106}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.174106}, urldate = {2023-06-30}, @@ -7619,7 +8466,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {4}, pages = {1112--1125}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/D2DD00133K}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d2dd00133k}, urldate = {2023-08-19}, @@ -7633,7 +8480,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, booktitle = {Bulletin of the {{American Physical Society}}}, author = {Kavanagh, Séan R}, date = {2023-03}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, url = {https://meetings.aps.org/Meeting/MAR23/Session/D41.2}, urldate = {2023-08-25}, eventtitle = {{{APS March Meeting}} 2023}, @@ -7650,7 +8497,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {19}, number = {3}, pages = {333--337}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/s41567-022-01888-2}, url = {https://www.nature.com/articles/s41567-022-01888-2}, @@ -7671,7 +8518,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01062-z}, url = {https://www.nature.com/articles/s41524-023-01062-z}, @@ -7712,7 +8559,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {11}, pages = {1045--1055}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/nphys4302}, url = {https://www.nature.com/articles/nphys4302}, @@ -7720,7 +8567,11 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {The physical description of all materials is rooted in quantum mechanics, which describes how atoms bond and electrons interact at a fundamental level. 
Although these quantum effects can in many cases be approximated by a classical description at the macroscopic level, in recent years there has been growing interest in material systems where quantum effects remain manifest over a wider range of energy and length scales. Such quantum materials include superconductors, graphene, topological insulators, Weyl semimetals, quantum spin liquids, and spin ices. Many of them derive their properties from reduced dimensionality, in particular from confinement of electrons to two-dimensional sheets. Moreover, they tend to be materials in which electrons cannot be considered as independent particles but interact strongly and give rise to collective excitations known as quasiparticles. In all cases, however, quantum-mechanical effects fundamentally alter properties of the material. This Review surveys the electronic properties of quantum materials through the prism of the electron wavefunction, and examines how its entanglement and topology give rise to a rich variety of quantum states and phases; these are less classically describable than conventional ordered states also driven by quantum mechanics, such as ferromagnetism.}, issue = {11}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Quantum physics;Theoretical physics Subject\_term\_id: quantum-physics;theoretical-physics}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Quantum physics;Theoretical physics\\ +Subject\_term\_id: quantum-physics;theoretical-physics}, file = {/Users/wasmer/Nextcloud/Zotero/Keimer_Moore_2017_The physics of quantum materials.pdf} } @@ -7733,7 +8584,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {121}, number = {16}, pages = {9816--9872}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.1c00107}, url = {https://doi.org/10.1021/acs.chemrev.1c00107}, @@ -7787,7 +8638,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-020-00401-8}, url = {https://www.nature.com/articles/s41524-020-00401-8}, @@ -7825,7 +8676,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {4}, number = {1}, pages = {1--12}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2399-3650}, doi = {10.1038/s42005-021-00587-3}, url = {https://www.nature.com/articles/s42005-021-00587-3}, @@ -7833,10 +8684,24 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {The anomalous Hall effect has been indispensable in our understanding of numerous magnetic phenomena. This concerns both ferromagnetic materials, as well as diverse classes of antiferromagnets, where in addition to the anomalous and recently discovered crystal Hall effect, the topological Hall effect in noncoplanar antiferromagnets has been a subject of intensive research in the past decades. Here, we uncover a distinct flavor of the Hall effect emerging in generic canted spin systems. We demonstrate that upon canting, the anomalous Hall effect acquires a contribution which is sensitive to the sense of imprinted vector chirality among spins. We explore the origins and basic properties of corresponding chiral Hall effect, and closely tie it to the symmetry properties of the system. 
Our findings suggest that the chiral Hall effect and corresponding chiral magneto-optical effects emerge as useful tools in characterizing an interplay of structure and chirality in complex magnets, as well as in tracking their chiral dynamics and fluctuations.}, issue = {1}, langid = {english}, - keywords = {Magnetic properties and materials,Spintronics}, + keywords = {AFM,chirality,collinear,crystal structure,crystal symmetry,Ferromagnetism,FZJ,Hall AHE,Hall CHE,Hall effect,Hall THE,magnetism,non-collinear,PGI,PGI-1/IAS-1,physics,symmetry,symmetry breaking}, file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al_2021_The chiral Hall effect in canted ferromagnets and antiferromagnets.pdf} } +@online{kippMachineLearningInspired2024, + title = {Machine Learning Inspired Models for {{Hall}} Effects in Non-Collinear Magnets}, + author = {Kipp, Jonathan and Lux, Fabian R. and Pürling, Thorben and Morrison, Abigail and Blügel, Stefan and Pinna, Daniele and Mokrousov, Yuriy}, + date = {2024-01-05}, + doi = {10.48550/arXiv.2401.03044}, + url = {https://arxiv.org/abs/2401.03044v1}, + urldate = {2024-01-12}, + abstract = {The anomalous Hall effect has been front and center in solid state research and material science for over a century now, and the complex transport phenomena in nontrivial magnetic textures have gained an increasing amount of attention, both in theoretical and experimental studies. However, a clear path forward to capturing the influence of magnetization dynamics on anomalous Hall effect even in smallest frustrated magnets or spatially extended magnetic textures is still intensively sought after. In this work, we present an expansion of the anomalous Hall tensor into symmetrically invariant objects, encoding the magnetic configuration up to arbitrary power of spin. We show that these symmetric invariants can be utilized in conjunction with advanced regularization techniques in order to build models for the electric transport in magnetic textures which are, on one hand, complete with respect to the point group symmetry of the underlying lattice, and on the other hand, depend on a minimal number of order parameters only. Here, using a four-band tight-binding model on a honeycomb lattice, we demonstrate that the developed method can be used to address the importance and properties of higher-order contributions to transverse transport. The efficiency and breadth enabled by this method provides an ideal systematic approach to tackle the inherent complexity of response properties of noncollinear magnets, paving the way to the exploration of electric transport in intrinsically frustrated magnets as well as large-scale magnetic textures.}, + langid = {english}, + pubstate = {preprint}, + keywords = {2D,2D material,AML,electric transport,feature selection,group theory,Hall AHE,Hall effect,higher order,invariance,linear regression,magnetic structure,magnetic supperlattice,magnetism,materials,ML,non-collinear,PCA,physics,point group,spin invariant,spin-dependent,spintronics,SVD,symmetrization,symmetry,TB,tensor decomposition,tight binding}, + file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al_2024_Machine learning inspired models for Hall effects in non-collinear magnets.pdf} +} + @article{kirkpatrickPushingFrontiersDensity2021, title = {Pushing the Frontiers of Density Functionals by Solving the Fractional Electron Problem}, author = {Kirkpatrick, James and McMorrow, Brendan and Turban, David H. P. and Gaunt, Alexander L. and Spencer, James S. and Matthews, Alexander G. D. G. 
and Obika, Annette and Thiry, Louis and Fortunato, Meire and Pfau, David and Castellanos, Lara Román and Petersen, Stig and Nelson, Alexander W. R. and Kohli, Pushmeet and Mori-Sánchez, Paula and Hassabis, Demis and Cohen, Aron J.}, @@ -7845,7 +8710,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {374}, number = {6573}, pages = {1385--1389}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.abj6511}, url = {https://www.science.org/doi/10.1126/science.abj6511}, urldate = {2022-05-13}, @@ -7910,8 +8775,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2021-06-11}, series = {P-{{RECS}} '21}, pages = {9--14}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/3456287.3465477}, url = {https://dl.acm.org/doi/10.1145/3456287.3465477}, urldate = {2023-05-15}, @@ -7944,7 +8809,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {10}, number = {1}, pages = {015021}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, doi = {10.1063/1.5111045}, url = {https://aip.scitation.org/doi/abs/10.1063/1.5111045}, urldate = {2021-05-13}, @@ -7952,6 +8817,19 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Kocer et al_2020_Continuous and optimally complete description of chemical environments using.pdf;/Users/wasmer/Zotero/storage/NZCCBRZE/1.html} } +@book{kochenderferAlgorithmsOptimization2019, + title = {Algorithms for Optimization}, + author = {Kochenderfer, Mykel J. and Wheeler, Tim Allan}, + date = {2019}, + publisher = {The MIT press}, + location = {Cambridge}, + url = {https://algorithmsbook.com/optimization/}, + abstract = {A comprehensive introduction to optimization with a focus on practical algorithms for the design of engineering systems. This book offers a comprehensive introduction to optimization with a focus on practical algorithms. The book approaches optimization from an engineering perspective, where the objective is to design a system that optimizes a set of metrics subject to constraints. Readers will learn about computational approaches for a range of challenges, including searching high-dimensional spaces, handling problems where there are multiple competing objectives, and accommodating uncertainty in the metrics. Figures, examples, and exercises convey the intuition behind the mathematical approaches. The text provides concrete implementations in the Julia programming language. Topics covered include derivatives and their generalization to multiple dimensions; local descent and first- and second-order methods that inform local descent; stochastic methods, which introduce randomness into the optimization process; linear constrained optimization, when both the objective function and the constraints are linear; surrogate models, probabilistic surrogate models, and using probabilistic surrogate models to guide optimization; optimization under uncertainty; uncertainty propagation; expression optimization; and multidisciplinary design optimization. Appendixes offer an introduction to the Julia language, test functions for evaluating algorithm performance, and mathematical concepts used in the derivation and analysis of the optimization methods discussed in the text. 
The book can be used by advanced undergraduates and graduate students in mathematics, statistics, computer science, any engineering field, (including electrical engineering and aerospace engineering), and operations research, and as a reference for professionals. -- Provided by publisher}, + isbn = {978-0-262-03942-0}, + langid = {english}, + keywords = {/unread,educational,Julia,learning material,mathematics,online book,optimization,textbook,with-code} +} + @article{koFourthgenerationHighdimensionalNeural2021, title = {A Fourth-Generation High-Dimensional Neural Network Potential with Accurate Electrostatics Including Non-Local Charge Transfer}, author = {Ko, Tsz Wai and Finkler, Jonas A. and Goedecker, Stefan and Behler, Jörg}, @@ -7961,7 +8839,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {1}, pages = {398}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-20427-2}, url = {https://www.nature.com/articles/s41467-020-20427-2}, @@ -7970,7 +8848,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {1}, langid = {english}, keywords = {CENT,HDNNP,rec-by-bluegel}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational methods;Density functional theory;Method development;Molecular dynamics Subject\_term\_id: computational-methods;density-functional-theory;method-development;molecular-dynamics}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Computational methods;Density functional theory;Method development;Molecular dynamics\\ +Subject\_term\_id: computational-methods;density-functional-theory;method-development;molecular-dynamics}, file = {/Users/wasmer/Nextcloud/Zotero/Ko et al_2021_A fourth-generation high-dimensional neural network potential with accurate.pdf;/Users/wasmer/Zotero/storage/2Z8H4HHW/s41467-020-20427-2.html} } @@ -7983,7 +8866,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {1}, pages = {398}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-20427-2}, url = {https://www.nature.com/articles/s41467-020-20427-2}, @@ -8004,7 +8887,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {1}, pages = {398}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-20427-2}, url = {https://www.nature.com/articles/s41467-020-20427-2}, @@ -8025,7 +8908,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {54}, number = {4}, pages = {808--817}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0001-4842}, doi = {10.1021/acs.accounts.0c00689}, url = {https://doi.org/10.1021/acs.accounts.0c00689}, @@ -8044,7 +8927,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {54}, number = {4}, pages = {808--817}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0001-4842}, doi = {10.1021/acs.accounts.0c00689}, url = {https://doi.org/10.1021/acs.accounts.0c00689}, @@ -8077,7 +8960,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {94}, number = {5}, pages = {1111--1120}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRev.94.1111}, url 
= {https://link.aps.org/doi/10.1103/PhysRev.94.1111}, urldate = {2023-09-19}, @@ -8096,7 +8979,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {1192}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2045-2322}, doi = {10.1038/s41598-017-01251-z}, url = {https://www.nature.com/articles/s41598-017-01251-z}, @@ -8117,7 +9000,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {949}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-28543-x}, url = {https://www.nature.com/articles/s41467-022-28543-x}, @@ -8138,7 +9021,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {11}, pages = {845--849}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1755-4349}, doi = {10.1038/nchem.1171}, url = {https://www.nature.com/articles/nchem.1171}, @@ -8177,7 +9060,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {61}, number = {1}, pages = {7--13}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9596}, doi = {10.1021/acs.jcim.0c00971}, url = {https://doi.org/10.1021/acs.jcim.0c00971}, @@ -8215,7 +9098,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {Machine learning applications often need large amounts of training data to perform well. Whereas unlabeled data can be easily gathered, the labeling process is difficult, time-consuming, or expensive in most applications. Active learning can help solve this problem by querying labels for those data points that will improve the performance the most. Thereby, the goal is that the learning algorithm performs sufficiently well with fewer labels. We provide a library called scikit-activeml that covers the most relevant query strategies and implements tools to work with partially labeled data. It is programmed in Python and builds on top of scikit-learn.}, langid = {english}, pubstate = {preprint}, - keywords = {\_tablet,/unread,active learning,General ML,library,Python,scikit-learn}, + keywords = {/unread,\_tablet,active learning,General ML,library,Python,scikit-learn}, file = {/Users/wasmer/Nextcloud/Zotero/Kottke et al_2021_scikit-activeml.pdf} } @@ -8254,6 +9137,42 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Kovacs et al_2023_Evaluation of the MACE Force Field Architecture.pdf;/Users/wasmer/Zotero/storage/NJT7SCKS/2305.html} } +@article{kovacsLinearAtomicCluster2021, + title = {Linear {{Atomic Cluster Expansion Force Fields}} for {{Organic Molecules}}: {{Beyond RMSE}}}, + shorttitle = {Linear {{Atomic Cluster Expansion Force Fields}} for {{Organic Molecules}}}, + author = {Kovács, Dávid Péter and family=Oord, given=Cas, prefix=van der, useprefix=false and Kucera, Jiri and Allen, Alice E. A. and Cole, Daniel J. and Ortner, Christoph and Csányi, Gábor}, + date = {2021-12-14}, + journaltitle = {Journal of Chemical Theory and Computation}, + shortjournal = {J. Chem. Theory Comput.}, + volume = {17}, + number = {12}, + pages = {7696--7711}, + publisher = {American Chemical Society}, + issn = {1549-9618}, + doi = {10.1021/acs.jctc.1c00647}, + url = {https://doi.org/10.1021/acs.jctc.1c00647}, + urldate = {2023-12-10}, + abstract = {We demonstrate that fast and accurate linear force fields can be built for molecules using the atomic cluster expansion (ACE) framework. 
The ACE models parametrize the potential energy surface in terms of body-ordered symmetric polynomials making the functional form reminiscent of traditional molecular mechanics force fields. We show that the four- or five-body ACE force fields improve on the accuracy of the empirical force fields by up to a factor of 10, reaching the accuracy typical of recently proposed machine-learning-based approaches. We not only show state of the art accuracy and speed on the widely used MD17 and ISO17 benchmark data sets, but we also go beyond RMSE by comparing a number of ML and empirical force fields to ACE on more important tasks such as normal-mode prediction, high-temperature molecular dynamics, dihedral torsional profile prediction, and even bond breaking. We also demonstrate the smoothness, transferability, and extrapolation capabilities of ACE on a new challenging benchmark data set comprised of a potential energy surface of a flexible druglike molecule.}, + keywords = {/unread,ACE,AML,ANI,benchmarking,descriptor comparison,descriptors,FCHL,GAP,MD17,ML,MLP,MLP comparison,model comparison,molecules,organic chemistry,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Kovács et al_2021_Linear Atomic Cluster Expansion Force Fields for Organic Molecules.pdf} +} + +@online{kovacsMACEOFF23TransferableMachine2023, + title = {{{MACE-OFF23}}: {{Transferable Machine Learning Force Fields}} for {{Organic Molecules}}}, + shorttitle = {{{MACE-OFF23}}}, + author = {Kovács, Dávid Péter and Moore, J. Harry and Browning, Nicholas J. and Batatia, Ilyes and Horton, Joshua T. and Kapil, Venkat and Magdău, Ioan-Bogdan and Cole, Daniel J. and Csányi, Gábor}, + date = {2023-12-23}, + eprint = {2312.15211v1}, + eprinttype = {arxiv}, + url = {https://arxiv.org/abs/2312.15211v1}, + urldate = {2024-01-02}, + abstract = {Classical empirical force fields have dominated biomolecular simulation for over 50 years. Although widely used in drug discovery, crystal structure prediction, and biomolecular dynamics, they generally lack the accuracy and transferability required for predictive modelling. In this paper, we introduce MACE-OFF23, a transferable force field for organic molecules created using state-of-the-art machine learning technology and first-principles reference data computed with a high level of quantum mechanical theory. MACE-OFF23 demonstrates the remarkable capabilities of local, short-range models by accurately predicting a wide variety of gas and condensed phase properties of molecular systems. It produces accurate, easy-to-converge dihedral torsion scans of unseen molecules, as well as reliable descriptions of molecular crystals and liquids, including quantum nuclear effects. We further demonstrate the capabilities of MACE-OFF23 by determining free energy surfaces in explicit solvent, as well as the folding dynamics of peptides. Finally, we simulate a fully solvated small protein, observing accurate secondary structure and vibrational spectrum. 
These developments enable first-principles simulations of molecular systems for the broader chemistry community at high accuracy and low computational cost.}, + langid = {english}, + pubstate = {preprint}, + keywords = {AML,GNN,ML,ML-FF,MLP,molecules,MPNN,organic chemistry,smal organic molecules,transfer learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Kovács et al_2023_MACE-OFF23.pdf} +} + @online{krennPredictingFutureAI2022, title = {Predicting the {{Future}} of {{AI}} with {{AI}}: {{High-quality}} Link Prediction in an Exponentially Growing Knowledge Network}, shorttitle = {Predicting the {{Future}} of {{AI}} with {{AI}}}, @@ -8280,7 +9199,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {4}, number = {12}, pages = {761--769}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5820}, doi = {10.1038/s42254-022-00518-3}, url = {https://www.nature.com/articles/s42254-022-00518-3}, @@ -8302,7 +9221,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {4}, pages = {045024}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/aba947}, url = {https://doi.org/10.1088/2632-2153/aba947}, @@ -8334,7 +9253,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, editor = {Kronmüller, Helmut and Parkin, Stuart}, date = {2007-07-27}, edition = {1}, - publisher = {{Wiley}}, + publisher = {Wiley}, doi = {10.1002/9780470022184}, url = {https://onlinelibrary.wiley.com/doi/book/10.1002/9780470022184}, urldate = {2023-11-01}, @@ -8368,7 +9287,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {121}, number = {5}, pages = {2780--2815}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00732}, url = {https://doi.org/10.1021/acs.chemrev.0c00732}, @@ -8410,7 +9329,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-06-12}, abstract = {We present an update of the DScribe package, a Python library for atomistic descriptors. The update extends DScribe's descriptor selection with the Valle-Oganov materials fingerprint and provides descriptor derivatives to enable more advanced machine learning tasks, such as force prediction and structure optimization. For all descriptors, numeric derivatives are now available in DSribe. For the many-body tensor representation (MBTR) and the Smooth Overlap of Atomic Positions (SOAP), we have also implemented analytic derivatives. 
We demonstrate the effectiveness of the descriptor derivatives for machine learning models of Cu clusters and perovskite alloys.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,ACSF,analytical derivatives,derivatives,descriptors,DScribe,library,materials,Matrix descriptors,MBTR,ML,Open source,Python,rec-by-ruess,SOAP,Valle-Oganov descriptor,with-code}, + keywords = {/unread,\_tablet,ACSF,analytical derivatives,derivatives,descriptors,DScribe,library,materials,Matrix descriptors,MBTR,ML,Open source,Python,rec-by-ruess,SOAP,Valle-Oganov descriptor,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Laakso et al_2023_Updates to the DScribe Library.pdf;/Users/wasmer/Zotero/storage/JNKQJJYE/2303.html} } @@ -8430,6 +9349,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Labrie-Boulay et al_2023_Machine learning-based spin structure detection.pdf;/Users/wasmer/Zotero/storage/SEU5QPUQ/2303.html} } +@online{lakshminarayananSimpleScalablePredictive2017, + title = {Simple and {{Scalable Predictive Uncertainty Estimation}} Using {{Deep Ensembles}}}, + author = {Lakshminarayanan, Balaji and Pritzel, Alexander and Blundell, Charles}, + date = {2017-11-03}, + eprint = {1612.01474}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.1612.01474}, + url = {http://arxiv.org/abs/1612.01474}, + urldate = {2023-12-05}, + abstract = {Deep neural networks (NNs) are powerful black box predictors that have recently achieved impressive performance on a wide spectrum of tasks. Quantifying predictive uncertainty in NNs is a challenging and yet unsolved problem. Bayesian NNs, which learn a distribution over weights, are currently the state-of-the-art for estimating predictive uncertainty; however these require significant modifications to the training procedure and are computationally expensive compared to standard (non-Bayesian) NNs. We propose an alternative to Bayesian NNs that is simple to implement, readily parallelizable, requires very little hyperparameter tuning, and yields high quality predictive uncertainty estimates. Through a series of experiments on classification and regression benchmarks, we demonstrate that our method produces well-calibrated uncertainty estimates which are as good or better than approximate Bayesian NNs. To assess robustness to dataset shift, we evaluate the predictive uncertainty on test examples from known and unknown distributions, and show that our method is able to express higher uncertainty on out-of-distribution examples. 
We demonstrate the scalability of our method by evaluating predictive uncertainty estimates on ImageNet.}, + pubstate = {preprint}, + keywords = {/unread,active learning,deep ensembles,ensemble learning,General ML,library,ML,original publication,uncertainty quantification,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Lakshminarayanan et al_2017_Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles.pdf;/Users/wasmer/Zotero/storage/AV8HUIKB/1612.html} +} + @online{lamGraphCastLearningSkillful2022, title = {{{GraphCast}}: {{Learning}} Skillful Medium-Range Global Weather Forecasting}, shorttitle = {{{GraphCast}}}, @@ -8447,6 +9382,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Lam et al_2022_GraphCast.pdf;/Users/wasmer/Zotero/storage/8UD54ESE/2212.html} } +@online{langeNeuralNetworkApproach2023, + title = {Neural Network Approach to Quasiparticle Dispersions in Doped Antiferromagnets}, + author = {Lange, Hannah and Döschl, Fabian and Carrasquilla, Juan and Bohrdt, Annabelle}, + date = {2023-10-12}, + eprint = {2310.08578}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:quant-ph}, + doi = {10.48550/arXiv.2310.08578}, + url = {http://arxiv.org/abs/2310.08578}, + urldate = {2024-02-28}, + abstract = {Numerically simulating spinful, fermionic systems is of great interest from the perspective of condensed matter physics. However, the exponential growth of the Hilbert space dimension with system size renders an exact parameterization of large quantum systems prohibitively demanding. This is a perfect playground for neural networks, owing to their immense representative power that often allows to use only a fraction of the parameters that are needed in exact methods. Here, we investigate the ability of neural quantum states (NQS) to represent the bosonic and fermionic \$t-J\$ model - the high interaction limit of the Fermi-Hubbard model - on different 1D and 2D lattices. Using autoregressive recurrent neural networks (RNNs) with 2D tensorized gated recurrent units, we study the ground state representations upon doping the half-filled system with holes. Moreover, we present a method to calculate dispersion relations from the neural network state representation, applicable to any neural network architecture and any lattice geometry, that allows to infer the low-energy physics from the NQS. To demonstrate our approach, we calculate the dispersion of a single hole in the \$t-J\$ model on different 1D and 2D square and triangular lattices. Furthermore, we analyze the strengths and weaknesses of the RNN approach for fermionic systems, pointing the way for an accurate and efficient parameterization of fermionic quantum systems using neural quantum states.}, + pubstate = {preprint}, + keywords = {/unread,Heisenberg model,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lange et al_2023_Neural network approach to quasiparticle dispersions in doped antiferromagnets.pdf;/Users/wasmer/Zotero/storage/8FDGYN7V/2310.html} +} + @online{langerHeatFluxSemilocal2023, title = {Heat Flux for Semi-Local Machine-Learning Potentials}, author = {Langer, Marcel F. 
and Knoop, Florian and Carbogno, Christian and Scheffler, Matthias and Rupp, Matthias}, @@ -8486,7 +9437,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--14}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00721-x}, url = {https://www.nature.com/articles/s41524-022-00721-x}, @@ -8506,7 +9457,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {29}, number = {27}, pages = {273002}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0953-8984}, doi = {10.1088/1361-648X/aa680e}, url = {https://doi.org/10.1088/1361-648x/aa680e}, @@ -8516,6 +9467,25 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Larsen et al_2017_The atomic simulation environment—a Python library for working with atoms.pdf} } +@article{laszloffyTopologicalSuperconductivityFirst2023, + title = {Topological Superconductivity from First Principles. {{II}}. {{Effects}} from Manipulation of Spin Spirals: {{Topological}} Fragmentation, Braiding, and Quasi-{{Majorana}} Bound States}, + shorttitle = {Topological Superconductivity from First Principles. {{II}}. {{Effects}} from Manipulation of Spin Spirals}, + author = {Lászlóffy, András and Nyári, Bendegúz and Csire, Gábor and Szunyogh, László and Újfalussy, Balázs}, + date = {2023-10-23}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {108}, + number = {13}, + pages = {134513}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.108.134513}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.108.134513}, + urldate = {2024-01-01}, + abstract = {Recent advances in electron spin resonance techniques have allowed the manipulation of the spin of individual atoms, making magnetic atomic chains on superconducting hosts one of the most promising platform where topological superconductivity can be engineered. Motivated by this progress, we provide a detailed, quantitative description of the effects of manipulating spins in realistic nanowires by applying a first-principles-based computational approach to a recent experiment: an iron chain deposited on top of an Au/Nb heterostructure. As a continuation of the preceding paper, experimentally relevant computational experiments are performed in spin spiral chains that shed light on several concerns about practical applications and add new aspects to the interpretation of recent experiments. 
We explore the stability of topological zero-energy states, the formation and distinction of topologically trivial and nontrivial zero energy edge states, the effect of local changes in the exchange fields, the emergence of topological fragmentation, and the shift of Majorana zero modes along the superconducting nanowires, opening avenues toward the implementation of a braiding operation.}, + keywords = {/unread,Budapest KKR group,GF2023 workshop,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lászlóffy et al_2023_Topological superconductivity from first principles.pdf;/Users/wasmer/Zotero/storage/P2497E9P/PhysRevB.108.html} +} + @online{laszloffyTopologicalSuperconductivityFirstprinciples2023, title = {Topological Superconductivity from First-Principles {{II}}: {{Effects}} from Manipulation of Spin Spirals \$-\$ {{Topological}} Fragmentation, Braiding, and {{Quasi-Majorana Bound States}}}, shorttitle = {Topological Superconductivity from First-Principles {{II}}}, @@ -8559,7 +9529,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {6039}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-33128-9}, url = {https://www.nature.com/articles/s41467-022-33128-9}, @@ -8588,7 +9558,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {31}, number = {9}, pages = {3591--3591}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.9b01629}, url = {https://doi.org/10.1021/acs.chemmater.9b01629}, @@ -8622,7 +9592,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {31}, number = {1}, pages = {26--51}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.8b04383}, url = {https://doi.org/10.1021/acs.chemmater.8b04383}, @@ -8642,7 +9612,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {15}, number = {3}, pages = {1593--1604}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.8b01089}, url = {https://doi.org/10.1021/acs.jctc.8b01089}, @@ -8661,7 +9631,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {157}, number = {17}, pages = {174114}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0121187}, url = {https://aip.scitation.org/doi/10.1063/5.0121187}, @@ -8698,7 +9668,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {6}, pages = {063801}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.3.063801}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.3.063801}, urldate = {2022-07-05}, @@ -8716,7 +9686,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {39}, number = {1}, pages = {1--24}, - publisher = {{Taylor \& Francis}}, + publisher = {Taylor \& Francis}, issn = {1040-8436}, doi = {10.1080/10408436.2013.772503}, url = {https://doi.org/10.1080/10408436.2013.772503}, @@ -8734,7 +9704,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {351}, number = {6280}, pages = {aad3000}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.aad3000}, url = {https://www.science.org/doi/full/10.1126/science.aad3000}, urldate = 
{2023-09-21}, @@ -8743,6 +9713,27 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/9ZLZBQ2M/Lejaeghere et al. - 2016 - Reproducibility in density functional theory calcu.pdf} } +@article{lemmImprovedDecisionMaking2023, + title = {Improved Decision Making with Similarity Based Machine Learning: Applications in Chemistry}, + shorttitle = {Improved Decision Making with Similarity Based Machine Learning}, + author = {Lemm, Dominik and family=Rudorff, given=Guido Falk, prefix=von, useprefix=false and family=Lilienfeld, given=O. Anatole, prefix=von, useprefix=false}, + date = {2023-12}, + journaltitle = {Machine Learning: Science and Technology}, + shortjournal = {Mach. Learn.: Sci. Technol.}, + volume = {4}, + number = {4}, + pages = {045043}, + publisher = {IOP Publishing}, + issn = {2632-2153}, + doi = {10.1088/2632-2153/ad0fa3}, + url = {https://dx.doi.org/10.1088/2632-2153/ad0fa3}, + urldate = {2023-12-06}, + abstract = {Despite the fundamental progress in autonomous molecular and materials discovery, data scarcity throughout chemical compound space still severely hampers the use of modern ready-made machine learning models as they rely heavily on the paradigm, ‘the bigger the data the better’. Presenting similarity based machine learning (SML), we show an approach to select data and train a model on-the-fly for specific queries, enabling decision making in data scarce scenarios in chemistry. By solely relying on query and training data proximity to choose training points, only a fraction of data is necessary to converge to competitive performance. After introducing SML for the harmonic oscillator and the Rosenbrock function, we describe applications to scarce data scenarios in chemistry which include quantum mechanics based molecular design and organic synthesis planning. Finally, we derive a relationship between the intrinsic dimensionality and volume of feature space, governing the overall model accuracy.}, + langid = {english}, + keywords = {/unread,AML,BoB,descriptors,FCHL,kernel methods,kernel PCA,kNN,KRR,ML,similarity learning,small data}, + file = {/Users/wasmer/Nextcloud/Zotero/Lemm et al_2023_Improved decision making with similarity based machine learning.pdf} +} + @online{lessigAtmoRepStochasticModel2023, title = {{{AtmoRep}}: {{A}} Stochastic Model of Atmosphere Dynamics Using Large Scale Representation Learning}, shorttitle = {{{AtmoRep}}}, @@ -8783,7 +9774,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {17}, number = {11}, pages = {7203--7214}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.1c00576}, url = {https://doi.org/10.1021/acs.jctc.1c00576}, @@ -8837,12 +9828,26 @@ Junqi Yin (Oak Ridge National Laboratory)}, doi = {10.48550/arXiv.2306.12059}, url = {http://arxiv.org/abs/2306.12059}, urldate = {2023-08-19}, - abstract = {Equivariant Transformers such as Equiformer have demonstrated the efficacy of applying Transformers to the domain of 3D atomistic systems. However, they are still limited to small degrees of equivariant representations due to their computational complexity. In this paper, we investigate whether these architectures can scale well to higher degrees. Starting from Equiformer, we first replace \$SO(3)\$ convolutions with eSCN convolutions to efficiently incorporate higher-degree tensors. 
Then, to better leverage the power of higher degrees, we propose three architectural improvements -- attention re-normalization, separable \$S\^2\$ activation and separable layer normalization. Putting this all together, we propose EquiformerV2, which outperforms previous state-of-the-art methods on the large-scale OC20 dataset by up to \$12\textbackslash\%\$ on forces, \$4\textbackslash\%\$ on energies, offers better speed-accuracy trade-offs, and \$2\textbackslash times\$ reduction in DFT calculations needed for computing adsorption energies.}, + abstract = {Equivariant Transformers such as Equiformer have demonstrated the efficacy of applying Transformers to the domain of 3D atomistic systems. However, they are still limited to small degrees of equivariant representations due to their computational complexity. In this paper, we investigate whether these architectures can scale well to higher degrees. Starting from Equiformer, we first replace \$SO(3)\$ convolutions with eSCN convolutions to efficiently incorporate higher-degree tensors. Then, to better leverage the power of higher degrees, we propose three architectural improvements -- attention re-normalization, separable \$S\textasciicircum 2\$ activation and separable layer normalization. Putting this all together, we propose EquiformerV2, which outperforms previous state-of-the-art methods on the large-scale OC20 dataset by up to \$12\textbackslash\%\$ on forces, \$4\textbackslash\%\$ on energies, offers better speed-accuracy trade-offs, and \$2\textbackslash times\$ reduction in DFT calculations needed for computing adsorption energies.}, pubstate = {preprint}, - keywords = {todo-tagging}, + keywords = {ablation study,AML,attention,Equiformer,equivariant,eSCN,GemNet,ML,MLP,OC20,Open Catalyst,QM9,SO(3),todo-tagging,transformer,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Liao et al_2023_EquiformerV2.pdf;/Users/wasmer/Zotero/storage/MHXITTSP/2306.html} } +@online{liaoEquiformerV2ImprovedEquivariant2024, + title = {{{EquiformerV2}}: {{Improved Equivariant Transformer}} for {{Scaling}} to {{Higher-Degree Representations}}}, + shorttitle = {{{EquiformerV2}}}, + author = {Liao, Yi-Lun and Wood, Brandon and Das, Abhishek and Smidt, Tess}, + date = {2024-03-06}, + url = {https://arxiv.org/abs/2306.12059v3}, + urldate = {2024-05-08}, + abstract = {Equivariant Transformers such as Equiformer have demonstrated the efficacy of applying Transformers to the domain of 3D atomistic systems. However, they are limited to small degrees of equivariant representations due to their computational complexity. In this paper, we investigate whether these architectures can scale well to higher degrees. Starting from Equiformer, we first replace \$SO(3)\$ convolutions with eSCN convolutions to efficiently incorporate higher-degree tensors. Then, to better leverage the power of higher degrees, we propose three architectural improvements -- attention re-normalization, separable \$S\textasciicircum 2\$ activation and separable layer normalization. Putting this all together, we propose EquiformerV2, which outperforms previous state-of-the-art methods on large-scale OC20 dataset by up to \$9\textbackslash\%\$ on forces, \$4\textbackslash\%\$ on energies, offers better speed-accuracy trade-offs, and \$2\textbackslash times\$ reduction in DFT calculations needed for computing adsorption energies. Additionally, EquiformerV2 trained on only OC22 dataset outperforms GemNet-OC trained on both OC20 and OC22 datasets, achieving much better data efficiency. 
Finally, we compare EquiformerV2 with Equiformer on QM9 and OC20 S2EF-2M datasets to better understand the performance gain brought by higher degrees.}, + langid = {english}, + pubstate = {preprint}, + keywords = {ablation study,AML,attention,Equiformer,equivariant,eSCN,GemNet,ML,MLP,OC20,Open Catalyst,QM9,SO(3),transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Liao et al_2023_EquiformerV3.pdf} +} + @article{liCriticalExaminationRobustness2023, title = {A Critical Examination of Robustness and Generalizability of Machine Learning Prediction of Materials Properties}, author = {Li, Kangming and DeCost, Brian and Choudhary, Kamal and Greenwood, Michael and Hattrick-Simpers, Jason}, @@ -8852,7 +9857,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01012-9}, url = {https://www.nature.com/articles/s41524-023-01012-9}, @@ -8864,6 +9869,27 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_A critical examination of robustness and generalizability of machine learning.pdf} } +@article{liCriticalExaminationRobustness2023a, + title = {A Critical Examination of Robustness and Generalizability of Machine Learning Prediction of Materials Properties}, + author = {Li, Kangming and DeCost, Brian and Choudhary, Kamal and Greenwood, Michael and Hattrick-Simpers, Jason}, + date = {2023-04-07}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--9}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01012-9}, + url = {https://www.nature.com/articles/s41524-023-01012-9}, + urldate = {2023-12-05}, + abstract = {Recent advances in machine learning (ML) have led to substantial performance improvement in material database benchmarks, but an excellent benchmark score may not imply good generalization performance. Here we show that ML models trained on Materials Project 2018 can have severely degraded performance on new compounds in Materials Project 2021 due to the distribution shift. We discuss how to foresee the issue with a few simple tools. Firstly, the uniform manifold approximation and projection (UMAP) can be used to investigate the relation between the training and test data within the feature space. Secondly, the disagreement between multiple ML models on the test data can illuminate out-of-distribution samples. We demonstrate that the UMAP-guided and query by committee acquisition strategies can greatly improve prediction accuracy by adding only 1\% of the test data. 
We believe this work provides valuable insights for building databases and models that enable better robustness and generalizability.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,best practices,database generation,materials project,ML,model drift,model evaluation,model robustness,query by committee,todo-tagging,UMAP,unsupervised learning,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_A critical examination of robustness and generalizability of machine learning2.pdf} +} + @article{liDeeplearningDensityFunctional2022, title = {Deep-Learning Density Functional Theory {{Hamiltonian}} for Efficient Ab Initio Electronic-Structure Calculation}, author = {Li, He and Wang, Zun and Zou, Nianlong and Ye, Meng and Xu, Runzhang and Gong, Xiaoxun and Duan, Wenhui and Xu, Yong}, @@ -8873,7 +9899,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {6}, pages = {367--377}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2662-8457}, doi = {10.1038/s43588-022-00265-6}, url = {https://www.nature.com/articles/s43588-022-00265-6}, @@ -8881,10 +9907,28 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {The marriage of density functional theory (DFT) and deep-learning methods has the potential to revolutionize modern computational materials science. Here we develop a deep neural network approach to represent the DFT Hamiltonian (DeepH) of crystalline materials, aiming to bypass the computationally demanding self-consistent field iterations of DFT and substantially improve the efficiency of ab initio electronic-structure calculations. A general framework is proposed to deal with the large dimensionality and gauge (or rotation) covariance of the DFT Hamiltonian matrix by virtue of locality, and this is realized by a message-passing neural network for deep learning. High accuracy, high efficiency and good transferability of the DeepH method are generally demonstrated for various kinds of material system and physical property. 
The method provides a solution to the accuracy–efficiency dilemma of DFT and opens opportunities to explore large-scale material systems, as evidenced by a promising application in the study of twisted van der Waals materials.}, issue = {6}, langid = {english}, - keywords = {\_tablet,2D material,AML,Berry phase,CNT,DeepH,defects,DFT,disordered,e3nn,equivariant,GGA,graphene,heterostructures,library,local coordinates,magnetism,materials,ML,ML-DFT,ML-ESM,MoS2,MPNN,near-sightedness,non-collinear,OpenMX,PBE,PCA,prediction of bandstructure,prediction of Berry phase,prediction of Hamiltonian matrix,SOC,spin-dependent,twisted bilayer graphene,vdW,vdW materials,with-code,with-data}, + keywords = {\_tablet,2D material,AML,Berry phase,CNT,DeepH,defects,DFT,disordered,e3nn,equivariant,GGA,graphene,heterostructures,library,local coordinates,materials,ML,ML-DFT,ML-ESM,MoS2,MPNN,near-sightedness,OpenMX,PBE,PCA,prediction of bandstructure,prediction of Berry phase,prediction of Hamiltonian matrix,SOC,twisted bilayer graphene,vdW,vdW materials,with-code,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2022_Deep-learning density functional theory Hamiltonian for efficient ab initio.pdf} } +@article{liDeepLearningDensityFunctional2024, + title = {Deep-{{Learning Density Functional Perturbation Theory}}}, + author = {Li, He and Tang, Zechen and Fu, Jingheng and Dong, Wen-Han and Zou, Nianlong and Gong, Xiaoxun and Duan, Wenhui and Xu, Yong}, + date = {2024-02-28}, + journaltitle = {Physical Review Letters}, + shortjournal = {Phys. Rev. Lett.}, + volume = {132}, + number = {9}, + pages = {096401}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevLett.132.096401}, + url = {https://link.aps.org/doi/10.1103/PhysRevLett.132.096401}, + urldate = {2024-04-18}, + abstract = {Calculating perturbation response properties of materials from first principles provides a vital link between theory and experiment, but is bottlenecked by the high computational cost. Here, a general framework is proposed to perform density functional perturbation theory (DFPT) calculations by neural networks, greatly improving the computational efficiency. Automatic differentiation is applied on neural networks, facilitating accurate computation of derivatives. High efficiency and good accuracy of the approach are demonstrated by studying electron-phonon coupling and related physical quantities. This work brings deep-learning density functional theory and DFPT into a unified framework, creating opportunities for developing ab initio artificial intelligence.}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2024_Deep-Learning Density Functional Perturbation Theory.pdf} +} + @article{liDeeplearningElectronicstructureCalculation2023, title = {Deep-Learning Electronic-Structure Calculation of Magnetic Superstructures}, author = {Li, He and Tang, Zechen and Gong, Xiaoxun and Zou, Nianlong and Duan, Wenhui and Xu, Yong}, @@ -8894,7 +9938,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {4}, pages = {321--327}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2662-8457}, doi = {10.1038/s43588-023-00424-3}, url = {https://www.nature.com/articles/s43588-023-00424-3}, @@ -8902,7 +9946,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {Ab initio studies of magnetic superstructures are indispensable to research on emergent quantum materials, but are currently bottlenecked by the formidable computational cost. 
Here, to break this bottleneck, we have developed a deep equivariant neural network framework to represent the density functional theory Hamiltonian of magnetic materials for efficient electronic-structure calculation. A neural network architecture incorporating a priori knowledge of fundamental physical principles, especially the nearsightedness principle and the equivariance requirements of Euclidean and time-reversal symmetries (\$\$E(3)\textbackslash times \textbackslash\{I,\{\{\{\textbackslash mathcal\{T\}\}\}\}\textbackslash\}\$\$), is designed, which is critical to capture the subtle magnetic effects. Systematic experiments on spin-spiral, nanotube and moiré magnets were performed, making the challenging study of magnetic skyrmions feasible.}, issue = {4}, langid = {english}, - keywords = {\_tablet,AML,constrained DFT,DeepH,DFT,E(3),e3nn,ENN,equivariant,Heisenberg model,Jij,library,magnetic interactions,magnetic structure,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,near-sightedness,OpenMX,PBE,prediction from magnetic configuration,prediction from structure,prediction of Hamiltonian matrix,prediction of Jij,prediction of magnetic moment,skyrmions,spin spiral,spin-dependent,transition metals,TRS,twisted bilayer,with-code,with-data}, + keywords = {\_tablet,AML,collinear,constrained DFT,DeepH,DFT,E(3),e3nn,ENN,equivariant,Heisenberg model,Jij,library,magnetic interactions,magnetic structure,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,near-sightedness,non-collinear,OpenMX,PBE,prediction from magnetic configuration,prediction from structure,prediction of Hamiltonian matrix,prediction of Jij,prediction of magnetic moment,skyrmions,spin spiral,spin-dependent,transition metals,TRS,twisted bilayer,with-code,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_Deep-learning electronic-structure calculation of magnetic superstructures.pdf} } @@ -8957,6 +10001,46 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/F46FQTHF/Liechtenstein et al. - 1987 - Local spin density functional approach to the theo.pdf;/Users/wasmer/Zotero/storage/23L5VB4T/0304885387907219.html} } +@article{liExploitingRedundancyLarge2023, + title = {Exploiting Redundancy in Large Materials Datasets for Efficient Machine Learning with Less Data}, + author = {Li, Kangming and Persaud, Daniel and Choudhary, Kamal and DeCost, Brian and Greenwood, Michael and Hattrick-Simpers, Jason}, + date = {2023-11-10}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {14}, + number = {1}, + pages = {7283}, + publisher = {Nature Publishing Group}, + issn = {2041-1723}, + doi = {10.1038/s41467-023-42992-y}, + url = {https://www.nature.com/articles/s41467-023-42992-y}, + urldate = {2023-12-05}, + abstract = {Extensive efforts to gather materials data have largely overlooked potential data redundancy. In this study, we present evidence of a significant degree of redundancy across multiple large datasets for various material properties, by revealing that up to 95\% of data can be safely removed from machine learning training with little impact on in-distribution prediction performance. The redundant data is related to over-represented material types and does not mitigate the severe performance degradation on out-of-distribution samples. In addition, we show that uncertainty-based active learning algorithms can construct much smaller but equally informative datasets. 
We discuss the effectiveness of informative data in improving prediction performance and robustness and provide insights into efficient data acquisition and machine learning training. This work challenges the “bigger is better” mentality and calls for attention to the information richness of materials data rather than a narrow emphasis on data volume.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,best practices,data redundancy,materials database,materials informatics,ML,small data,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_Exploiting redundancy in large materials datasets for efficient machine.pdf} +} + +@article{liInitioArtificialIntelligence2023, + title = {Ab Initio Artificial Intelligence: {{Future}} Research of {{Materials Genome Initiative}}}, + shorttitle = {Ab Initio Artificial Intelligence}, + author = {Li, He and Xu, Yong and Duan, Wenhui}, + date = {2023}, + journaltitle = {Materials Genome Engineering Advances}, + volume = {1}, + number = {2}, + pages = {e16}, + issn = {2940-9497}, + doi = {10.1002/mgea.16}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/mgea.16}, + urldate = {2024-04-18}, + abstract = {The marriage of artificial intelligence (AI) and Materials Genome Initiative (MGI) could profoundly change the landscape of modern materials research, leading to a new paradigm of data-driven and AI-driven materials discovery. In this perspective, we will give an overview on the central role of AI in the MGI research. In particular, an emerging research field of ab initio AI, which applies state-of-the-art AI techniques to help solve bottleneck problems of ab initio computation, will be introduced. The development of ab initio AI will greatly accelerate high-throughput computation, promote the construction of large materials database, and open new opportunities for future research of MGI.}, + langid = {english}, + keywords = {\_tablet,AI4Science,AML,autoencoder,DFT,diffusion model,DM21,Electronic structure,FermiNet,inverse design,materials discovery,Materials genome initiative,MD,ML,ML-DFA,ML-DFT,ML-ESM,perspective,QMC,review,VAE}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_Ab initio artificial intelligence.pdf;/Users/wasmer/Zotero/storage/SH6A3KT6/mgea.html} +} + @article{liKohnShamEquationsRegularizer2021, title = {Kohn-{{Sham Equations}} as {{Regularizer}}: {{Building Prior Knowledge}} into {{Machine-Learned Physics}}}, shorttitle = {Kohn-{{Sham Equations}} as {{Regularizer}}}, @@ -8967,12 +10051,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {126}, number = {3}, pages = {036401}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.126.036401}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.126.036401}, urldate = {2022-07-07}, abstract = {Including prior knowledge is important for effective machine learning models in physics and is usually achieved by explicitly adding loss terms or constraints on model architectures. Prior knowledge embedded in the physics computation itself rarely draws attention. We show that solving the Kohn-Sham equations when training neural networks for the exchange-correlation functional provides an implicit regularization that greatly improves generalization. Two separations suffice for learning the entire one-dimensional H2 dissociation curve within chemical accuracy, including the strongly correlated region. 
Our models also generalize to unseen types of molecules and overcome self-interaction error.}, - keywords = {autodiff,CNN,DFT,JAX,JAX-DFT,Kohn-Sham regularizer,ML,ML-DFA,ML-DFT,ML-ESM,molecules,original publication,prediction from density,regularization,RNN}, + keywords = {autodiff,CNN,DFT,JAX,JAX-DFT,Kohn-Sham regularizer,library,ML,ML-DFA,ML-DFT,ML-ESM,molecules,original publication,prediction from density,prediction of Exc,regularization,RNN,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2021_Kohn-Sham Equations as Regularizer.pdf;/Users/wasmer/Zotero/storage/CAFV9KV8/Li et al_2021_Kohn-Sham Equations as Regularizer.pdf;/Users/wasmer/Zotero/storage/QQA9HJV3/Li et al. - 2021 - Kohn-Sham Equations as Regularizer Building Prior.gif;/Users/wasmer/Zotero/storage/2MCFRSEU/PhysRevLett.126.html} } @@ -8985,7 +10069,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {140}, number = {17}, pages = {174712}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.4874158}, url = {https://aip.scitation.org/doi/10.1063/1.4874158}, @@ -9016,7 +10100,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, shorttitle = {Theoretical Prediction of Properties of Atomistic Systems}, author = {Lindmaa, Alexander}, date = {2017}, - publisher = {{Linköping University Electronic Press}}, + publisher = {Linköping University Electronic Press}, url = {http://urn.kb.se/resolve?urn=urn:nbn:se:liu:diva-139767}, urldate = {2021-06-26}, abstract = {The prediction of ground state properties of atomistic systems is of vital importance in technological advances as well as in the physical sciences. Fundamentally, these predictions are based on a ...}, @@ -9041,6 +10125,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Lin et al_2023_Efficient Approximations of Complete Interatomic Potentials for Crystal.pdf;/Users/wasmer/Zotero/storage/E3N59FIA/2306.html} } +@online{liNeuralnetworkDensityFunctional2024, + title = {Neural-Network Density Functional Theory}, + author = {Li, Yang and Tang, Zechen and Chen, Zezhou and Sun, Minghui and Zhao, Boheng and Li, He and Tao, Honggeng and Yuan, Zilong and Duan, Wenhui and Xu, Yong}, + date = {2024-03-17}, + eprint = {2403.11287}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2403.11287}, + url = {http://arxiv.org/abs/2403.11287}, + urldate = {2024-04-18}, + abstract = {Deep-learning density functional theory (DFT) shows great promise to significantly accelerate material discovery and potentially revolutionize materials research, which demands a close combination between neural networks and DFT computation. However, current research in this field primarily relies on supervised learning, making the developments of neural networks and DFT isolated from each other. In this work, we present a theoretical framework of neural-network DFT, which unifies the optimization of neural networks with the variational computation of DFT, enabling physics-informed unsupervised learning. Moreover, we develop a differential DFT code incorporated with deep-learning DFT Hamiltonian, and introduce algorithms of automatic differentiation and backpropagation to DFT, demonstrating the concept of neural-network DFT. 
The advanced neural-network architecture not only surpasses conventional approaches in accuracy and efficiency, but offers a new paradigm for developing deep-learning DFT methods.}, + pubstate = {preprint}, + keywords = {/unread,\_tablet,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2024_Neural-network density functional theory.pdf;/Users/wasmer/Zotero/storage/J4JJX4HR/2403.html} +} + @article{lingHighDimensionalMaterialsProcess2017, title = {High-{{Dimensional Materials}} and {{Process Optimization Using Data-Driven Experimental Design}} with {{Well-Calibrated Uncertainty Estimates}}}, author = {Ling, Julia and Hutchinson, Maxwell and Antono, Erin and Paradiso, Sean and Meredig, Bryce}, @@ -9067,7 +10167,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Acta Numerica}, volume = {28}, pages = {405--539}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, issn = {0962-4929, 1474-0508}, doi = {10.1017/S0962492919000047}, url = {https://www.cambridge.org/core/journals/acta-numerica/article/numerical-methods-for-kohnsham-density-functional-theory/755DFB88349DD5F1EE1E360AD61661BF}, @@ -9117,7 +10217,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {118}, number = {15}, pages = {e2101344118}, - publisher = {{Proceedings of the National Academy of Sciences}}, + publisher = {Proceedings of the National Academy of Sciences}, doi = {10.1073/pnas.2101344118}, url = {https://www.pnas.org/doi/10.1073/pnas.2101344118}, urldate = {2022-07-08}, @@ -9151,7 +10251,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {28}, number = {35}, pages = {355501}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0953-8984}, doi = {10.1088/0953-8984/28/35/355501}, url = {https://dx.doi.org/10.1088/0953-8984/28/35/355501}, @@ -9189,7 +10289,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {121}, number = {38}, pages = {7273--7281}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1089-5639}, doi = {10.1021/acs.jpca.7b07045}, url = {https://doi.org/10.1021/acs.jpca.7b07045}, @@ -9199,11 +10299,28 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2017_Improving the Performance of Long-Range-Corrected Exchange-Correlation.pdf;/Users/wasmer/Zotero/storage/76EWRKPT/acs.jpca.html} } +@online{liuKANKolmogorovArnoldNetworks2024, + title = {{{KAN}}: {{Kolmogorov-Arnold Networks}}}, + shorttitle = {{{KAN}}}, + author = {Liu, Ziming and Wang, Yixuan and Vaidya, Sachin and Ruehle, Fabian and Halverson, James and Soljačić, Marin and Hou, Thomas Y. and Tegmark, Max}, + date = {2024-05-02}, + eprint = {2404.19756}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, stat}, + doi = {10.48550/arXiv.2404.19756}, + url = {http://arxiv.org/abs/2404.19756}, + urldate = {2024-05-05}, + abstract = {Inspired by the Kolmogorov-Arnold representation theorem, we propose Kolmogorov-Arnold Networks (KANs) as promising alternatives to Multi-Layer Perceptrons (MLPs). While MLPs have fixed activation functions on nodes ("neurons"), KANs have learnable activation functions on edges ("weights"). KANs have no linear weights at all -- every weight parameter is replaced by a univariate function parametrized as a spline. We show that this seemingly simple change makes KANs outperform MLPs in terms of accuracy and interpretability. 
For accuracy, much smaller KANs can achieve comparable or better accuracy than much larger MLPs in data fitting and PDE solving. Theoretically and empirically, KANs possess faster neural scaling laws than MLPs. For interpretability, KANs can be intuitively visualized and can easily interact with human users. Through two examples in mathematics and physics, KANs are shown to be useful collaborators helping scientists (re)discover mathematical and physical laws. In summary, KANs are promising alternatives for MLPs, opening opportunities for further improving today's deep learning models which rely heavily on MLPs.}, + pubstate = {preprint}, + keywords = {/unread,AI4Science,alternative approaches,alternative to MLP,Deep learning,General ML,hot topic,KAN,ML,original publication,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2024_KAN.pdf;/Users/wasmer/Zotero/storage/TUFV5YR7/2404.html} +} + @article{liuLargeScaleDataset2022, title = {Large Scale Dataset of Real Space Electronic Charge Density of Cubic Inorganic Materials from Density Functional Theory ({{DFT}}) Calculations}, author = {Liu, Yu}, date = {2022-02-14T11:52:45+00:00}, - publisher = {{figshare}}, + publisher = {figshare}, doi = {10.6084/m9.figshare.c.5368343.v1}, url = {https://springernature.figshare.com/collections/Large_scale_dataset_of_real_space_electronic_charge_density_of_cubic_inorganic_materials_from_density_functional_theory_DFT_calculations/5368343}, urldate = {2023-04-14}, @@ -9223,7 +10340,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, doi = {10.48550/arXiv.1912.13460}, url = {http://arxiv.org/abs/1912.13460}, urldate = {2023-09-19}, - abstract = {The development of machine learning sheds new light on the problem of statistical thermodynamics in multicomponent alloys. However, a data-driven approach to construct the effective Hamiltonian requires sufficiently large data sets, which is expensive to calculate with conventional density functional theory (DFT). To solve this problem, we propose to use the atomic local energy as the target variable, and harness the power of the linear-scaling DFT to accelerate the data generating process. Using the large amounts of DFT data sets, various complex models are devised and applied to learn the effective Hamiltonians of a range of refractory high entropy alloys (HEAs). The testing \$R\^2\$ scores of the effective pair interaction model are higher than 0.99, demonstrating that the pair interactions within the 6-th coordination shell provide an excellent description of the atomic local energies for all the four HEAs. This model is further improved by including nonlinear and multi-site interactions. In particular, the deep neural networks (DNNs) built directly in the local configuration space (therefore no hand-crafted features) are employed to model the effective Hamiltonian. The results demonstrate that neural networks are promising for the modeling of effective Hamiltonian due to its excellent representation power.}, + abstract = {The development of machine learning sheds new light on the problem of statistical thermodynamics in multicomponent alloys. However, a data-driven approach to construct the effective Hamiltonian requires sufficiently large data sets, which is expensive to calculate with conventional density functional theory (DFT). To solve this problem, we propose to use the atomic local energy as the target variable, and harness the power of the linear-scaling DFT to accelerate the data generating process. 
Using the large amounts of DFT data sets, various complex models are devised and applied to learn the effective Hamiltonians of a range of refractory high entropy alloys (HEAs). The testing \$R\textasciicircum 2\$ scores of the effective pair interaction model are higher than 0.99, demonstrating that the pair interactions within the 6-th coordination shell provide an excellent description of the atomic local energies for all the four HEAs. This model is further improved by including nonlinear and multi-site interactions. In particular, the deep neural networks (DNNs) built directly in the local configuration space (therefore no hand-crafted features) are employed to model the effective Hamiltonian. The results demonstrate that neural networks are promising for the modeling of effective Hamiltonian due to its excellent representation power.}, pubstate = {preprint}, keywords = {/unread,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2019_Machine Learning the Effective Hamiltonian in High Entropy Alloys.pdf;/Users/wasmer/Zotero/storage/GU92DRKM/1912.html} @@ -9258,7 +10375,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, doi = {10.48550/arXiv.1906.02889}, url = {http://arxiv.org/abs/1906.02889}, urldate = {2023-09-19}, - abstract = {The development of machine learning sheds new light on the traditionally complicated problem of thermodynamics in multicomponent alloys. Successful application of such a method, however, strongly depends on the quality of the data and model. Here we propose a scheme to improve the representativeness of the data by utilizing the short-range order (SRO) parameters to survey the configuration space. Using the improved data, a pair interaction model is trained for the NbMoTaW high entropy alloy using linear regression. Benefiting from the physics incorporated into the model, the learned effective Hamiltonian demonstrates excellent predictability over the whole configuration space. By including pair interactions within the 6th nearest-neighbor shell, this model achieves an \$R\^2\$ testing score of 0.997 and root mean square error of 0.43 meV. We further perform a detailed analysis on the effects of training data, testing data, and model parameters. The results reveal the vital importance of representative data and physical model. On the other hand, we also examined the performance neural networks, which is found to demonstrate a strong tendency to overfit the data.}, + abstract = {The development of machine learning sheds new light on the traditionally complicated problem of thermodynamics in multicomponent alloys. Successful application of such a method, however, strongly depends on the quality of the data and model. Here we propose a scheme to improve the representativeness of the data by utilizing the short-range order (SRO) parameters to survey the configuration space. Using the improved data, a pair interaction model is trained for the NbMoTaW high entropy alloy using linear regression. Benefiting from the physics incorporated into the model, the learned effective Hamiltonian demonstrates excellent predictability over the whole configuration space. By including pair interactions within the 6th nearest-neighbor shell, this model achieves an \$R\textasciicircum 2\$ testing score of 0.997 and root mean square error of 0.43 meV. We further perform a detailed analysis on the effects of training data, testing data, and model parameters. The results reveal the vital importance of representative data and physical model. 
On the other hand, we also examined the performance neural networks, which is found to demonstrate a strong tendency to overfit the data.}, pubstate = {preprint}, keywords = {/unread,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2019_Machine learning modeling of high entropy alloy.pdf;/Users/wasmer/Zotero/storage/RIJJA86L/1906.html} @@ -9297,7 +10414,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-06-15}, abstract = {Topological insulators (TIs) provide intriguing prospects for the future of spintronics due to their large spin–orbit coupling and dissipationless, counter-propagating conduction channels in the surface state. The combination of topological properties and magnetic order can lead to new quantum states including the quantum anomalous Hall effect that was first experimentally realized in Cr-doped (Bi,Sb)2Te3 films. Since magnetic doping can introduce detrimental effects, requiring very low operational temperatures, alternative approaches are explored. Proximity coupling to magnetically ordered systems is an obvious option, with the prospect to raise the temperature for observing the various quantum effects. Here, an overview of proximity coupling and interfacial effects in TI heterostructures is presented, which provides a versatile materials platform for tuning the magnetic and topological properties of these exciting materials. An introduction is first given to the heterostructure growth by molecular beam epitaxy and suitable structural, electronic, and magnetic characterization techniques. Going beyond transition-metal-doped and undoped TI heterostructures, examples of heterostructures are discussed, including rare-earth-doped TIs, magnetic insulators, and antiferromagnets, which lead to exotic phenomena such as skyrmions and exchange bias. Finally, an outlook on novel heterostructures such as intrinsic magnetic TIs and systems including 2D materials is given.}, langid = {english}, - keywords = {\_tablet,/unread,Hall effect,Hall QAHE,magnetic TIs,review,topological insulator}, + keywords = {/unread,\_tablet,Hall effect,Hall QAHE,magnetic TIs,review,topological insulator}, file = {/Users/wasmer/Nextcloud/Zotero/Liu_Hesjedal_2021_Magnetic Topological Insulator Heterostructures.pdf;/Users/wasmer/Zotero/storage/VEP2MG97/adma.html} } @@ -9329,7 +10446,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {124}, number = {32}, pages = {17811--17818}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1932-7447}, doi = {10.1021/acs.jpcc.0c04225}, url = {https://doi.org/10.1021/acs.jpcc.0c04225}, @@ -9344,7 +10461,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Overcoming {{Data Scarcity}} in {{Deep Learning}} of {{Scientific Problems}}}, author = {Loh, Charlotte Chang Le}, date = {2021-09}, - institution = {{Massachusetts Institute of Technology}}, + institution = {Massachusetts Institute of Technology}, url = {https://dspace.mit.edu/handle/1721.1/140165}, urldate = {2022-05-18}, abstract = {Data-driven approaches such as machine learning have been increasingly applied to the natural sciences, e.g. for property prediction and optimization or material discovery. An essential criteria to ensure the success of such methods is the need for extensive amounts of labeled data, making it unfeasible for data-scarce problems where labeled data generation is computationally expensive, or labour and time intensive. 
Here, I introduce surrogate and invariance-boosted contrastive learning (SIB-CL), a deep learning framework which overcomes data-scarcity by incorporating three “inexpensive” and easily obtainable auxiliary information. Specifically, these are: 1) abundant unlabeled data, 2) prior knowledge of known symmetries or invariances of the problem and 3) a surrogate dataset obtained at near-zero cost either from simplification or approximation. I demonstrate the effectiveness and generality of SIB-CL on various scientific problems, for example, the prediction of the density-of-states of 2D photonic crystals and solving the time-independent Schrödinger equation of 3D random potentials. SIB-CL is shown to provide orders of magnitude savings on the amount of labeled data needed when compared to conventional deep learning techniques, offering opportunities to apply data-driven methods even to data-scarce problems.}, @@ -9362,7 +10479,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {4}, pages = {169--174}, - publisher = {{Taylor \& Francis}}, + publisher = {Taylor \& Francis}, issn = {null}, doi = {10.1080/21663831.2020.1863876}, url = {https://doi.org/10.1080/21663831.2020.1863876}, @@ -9399,7 +10516,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {4}, pages = {045802}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.7.045802}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.7.045802}, urldate = {2023-05-05}, @@ -9424,16 +10541,13 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression.pdf;/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression2.pdf;/Users/wasmer/Zotero/storage/QNGQ9AQD/2212.html} } -@book{lounisTheoryMagneticTransition2007, +@thesis{lounisTheoryMagneticTransition2007, title = {Theory of {{Magnetic Transition Metal Nanoclusters}} on {{Surfaces}}}, author = {Lounis, Samir}, date = {2007}, - series = {Schriften Des {{Forschungszentrums Jülich}}. {{Reihe Materie}} Und {{Material}} / {{Matter}} and {{Materials}}}, - number = {41}, - publisher = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, - location = {{Jülich}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, + location = {Jülich}, abstract = {The question how magnetism behaves when the dimension of materials is reduced to increasingly smaller sizes has attracted much research and led to the development of the field of magnetic nanostructures. This research has been fueled by the technological potential of these systems for the field of high-density magnetic storage media and has been accelerated by the many novel experimental methods and techniques developed exhibiting atomic resolution. This thesis is motivated by the quest for the understanding and the exploration of complex magnetism provided by atomic scale magnetic clusters deposited on surfaces or embedded in the bulk. The nature of magnetism in these systems can be very rich, in that the properties depend on the atomic species, the cluster size, shape and symmetry or choice of the substrate. Small variations of the cluster parameter may change the properties dramatically. 
Particularly rich and particularly challenging for experiment and theory is the behavior of clusters with competing magnetic interactions either between the cluster atoms or between the cluster and the substrate. In both cases magnetic frustration can lead to non-collinear magnetic structures for which the magnetic quantization axis changes from atom to atom. This thesis sheds light onto these systems from a theoretical perspective. Use is made of the density functional theory (DFT), the most successful material specific theory for describing electronic and derived properties from first-principles. Acting within this framework, we have developed and implemented the treatment of non-collinear magnetism into the Jülich version of the full-potential Korringa-Kohn-Rostoker Green Function (KKR-GF) method. The KKR-GF method provides several advantages compared to other first-principles methods. Based on solving the Dyson equation it allows an elegant treatment of non-periodic systems such as impurities and clusters in bulk or on surfaces. Electronic, magnetic properties and the observables provided by experimental techniques such as x-ray, scanning tunneling microscopy and spectroscopy can be accessed with the KKR-GF method. Firstly, the method was applied to 3\$\textbackslash textit\{d\}\$ transition-metal clusters on different ferromagnetic surfaces. Different types of magnetic clusters were selected. Clusters of Fe, Co, Ni atoms are ferromagnetic and thus magnetically collinear. In order to investigate magnetic frustration due to competing interactions within the ad-cluster we considered a (001) oriented surface of \$\textbackslash textit\{fcc\}\$ metals, a topology which usually does not lead to non-collinear magnetism. We tuned the strength of the magnetic coupling between the ad-clusters and the ferromagnetic surface by varying the substrate from the case of Ni(001) with a rather weak hybridization of the Ni \$\textbackslash textit\{d\}\$-states with the adatom \$\textbackslash textit\{d\}\$-states to the case of Fe\$\_\{3ML\}\$/Cu(001) with a much stronger hybridization due to the larger extent of the Fe wavefunctions. On Ni(001), the interaction between the Cr- as well as the Mn-dimer adatoms is of antiferromagnetic nature, which is in competition with the interaction with the substrate atoms. If we allow the magnetism to be non-collinear, the moments rotate such that the Cr-(Mn) adatom moments are aligned antiparallel to each other and are basically perpendicular to the substrate moments. However, the weak AF(FM) interaction with the substrate causes a slight tilting towards the substrate, leading to an angle of 94.2°(72.6°) instead of 90°. After performing total energy calculations we find that for Cr-dimer the ground state is collinear whereas the Mn-dimer prefers the non-collinear configuration as ground state. 
The Heisenberg model is shown [...]}, - isbn = {978-3-89336-501-2}, langid = {english}, pagetotal = {189}, keywords = {\_tablet,Dissertation (Univ.),Hochschulschrift,juKKR,KKR,magnetism,PGI-1/IAS-1,thesis}, @@ -9469,7 +10583,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {21}, number = {1}, pages = {75--91}, - publisher = {{物理学报}}, + publisher = {物理学报}, issn = {1000-3290}, doi = {10.7498/aps.21.75}, url = {https://wulixb.iphy.ac.cn/en/article/doi/10.7498/aps.21.75}, @@ -9489,7 +10603,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {11}, pages = {761--781}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-3358}, doi = {10.1038/s41570-022-00424-3}, url = {https://www.nature.com/articles/s41570-022-00424-3}, @@ -9514,7 +10628,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-02-23}, abstract = {The advent of computational statistical disciplines, such as machine learning, is leading to a paradigm shift in the way we conceive the design of new compounds. Today computational science does not only provide a sound understanding of experiments, but also can directly design the best compound for specific applications. This approach, known as reverse engineering, requires the construction of models able to efficiently predict continuous structure-property maps. Here we show that reverse engineering can be used to tune the magnetic properties of a single-ion molecular magnet in an automated intelligent fashion. We design a machine learning model to predict both the energy and magnetic properties as function of the chemical structure. Then, a particle-swarm optimization algorithm is used to explore the conformational landscapes in the search for new molecular structures leading to an enhanced magnetic anisotropy. We find that a 5\% change in one of the coordination angles leads to a 50\% increase in the anisotropy. Our approach paves the way for a machine-learning-driven exploration of the chemical space of general classes of magnetic materials. Most importantly, it can be applied to any structure-property relation and offers an effective way to automatically generate new materials with target properties starting from the knowledge of previously synthesized ones.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + keywords = {/unread,\_tablet,Condensed Matter - Materials Science,Physics - Computational Physics}, file = {/Users/wasmer/Nextcloud/Zotero/Lunghi_Sanvito_2019_Surfing multiple conformation-property landscapes via machine learning.pdf;/Users/wasmer/Zotero/storage/FQSQYUBP/1911.html} } @@ -9526,8 +10640,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2022}, series = {Communications in {{Computer}} and {{Information Science}}}, pages = {79--98}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-030-96498-6_5}, abstract = {We use graph convolutional neural networks (GCNNs) to produce fast and accurate predictions of the total energy of solid solution binary alloys. GCNNs allow us to abstract the lattice structure of a solid material as a graph, whereby atoms are modeled as nodes and metallic bonds as edges. 
This representation naturally incorporates information about the structure of the material, thereby eliminating the need for computationally expensive data pre-processing which would be required with standard neural network (NN) approaches. We train GCNNs on ab-initio density functional theory (DFT) for copper-gold (CuAu) and iron-platinum (FePt) data that has been generated by running the LSMS-3 code, which implements a locally self-consistent multiple scattering method, on OLCF supercomputers Titan and Summit. GCNN outperforms the ab-initio DFT simulation by orders of magnitude in terms of computational time to produce the estimate of the total energy for a given atomic configuration of the lattice structure. We compare the predictive performance of GCNN models against a standard NN such as dense feedforward multi-layer perceptron (MLP) by using the root-mean-squared errors to quantify the predictive quality of the deep learning (DL) models. We find that the attainable accuracy of GCNNs is at least an order of magnitude better than that of the MLP.}, isbn = {978-3-030-96498-6}, @@ -9561,7 +10675,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {https://www.osti.gov/biblio/1826659}, urldate = {2023-09-19}, abstract = {Distributed PyTorch implementation of multi-headed graph convolutional neural networks}, - organization = {{Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}}, + organization = {Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}, keywords = {/unread,AML,GNN,HydraGNN,library,ML,multi-task learning,PyTorch} } @@ -9607,7 +10721,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {1--12}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-021-00559-9}, url = {https://www.nature.com/articles/s41524-021-00559-9}, @@ -9628,7 +10742,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {24}, number = {47}, pages = {28700--28781}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/D2CP02827A}, url = {https://pubs.rsc.org/en/content/articlelanding/2022/cp/d2cp02827a}, urldate = {2023-06-30}, @@ -9646,7 +10760,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-07-01}, abstract = {Self-driving labs (SDLs) leverage combinations of artificial intelligence, automation, and advanced computing to accelerate scientific discovery. The promise of this field has given rise to a rich community of passionate scientists, engineers, and social scientists, as evidenced by the development of the Acceleration Consortium and recent Accelerate Conference. Despite its strengths, this rapidly developing field presents numerous opportunities for growth, challenges to overcome, and potential risks of which to remain aware. This community perspective builds on a discourse instantiated during the first Accelerate Conference, and looks to the future of self-driving labs with a tempered optimism. Incorporating input from academia, government, and industry, we briefly describe the current status of self-driving labs, then turn our attention to barriers, opportunities, and a vision for what is possible. Our field is delivering solutions in technology and infrastructure, artificial intelligence and knowledge generation, and education and workforce development. 
In the spirit of community, we intend for this work to foster discussion and drive best practices as our field grows.}, langid = {english}, - organization = {{arXiv.org}}, + organization = {arXiv.org}, keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Maffettone et al_2023_What is missing in autonomous discovery.pdf} } @@ -9660,7 +10774,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {6}, pages = {353--360}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acsearthspacechem.7b00061}, url = {https://doi.org/10.1021/acsearthspacechem.7b00061}, urldate = {2021-10-21}, @@ -9669,21 +10783,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Magalhães et al_2017_Density Functional Theory Calculation of the Absorption Properties of Brown.pdf;/Users/wasmer/Zotero/storage/ZRG83Z75/acsearthspacechem.html} } -@book{MagnetismElectronicStructure, - title = {Magnetism and the {{Electronic Structure}} of {{Crystals}}}, - url = {https://link.springer.com/book/10.1007/978-3-642-84411-9}, - urldate = {2022-06-18}, - langid = {english}, - keywords = {\_tablet,condensed matter,defects,DFT,magnetism}, - file = {/Users/wasmer/Nextcloud/Zotero/Magnetism and the Electronic Structure of Crystals.pdf;/Users/wasmer/Zotero/storage/QVJRNHRA/978-3-642-84411-9.html} -} - @book{majlisQuantumTheoryMagnetism2007, title = {The {{Quantum Theory}} of {{Magnetism}}}, author = {Majlis, Norberto}, date = {2007-09}, edition = {2}, - publisher = {{WORLD SCIENTIFIC}}, + publisher = {WORLD SCIENTIFIC}, doi = {10.1142/6094}, url = {http://www.worldscientific.com/worldscibooks/10.1142/6094}, urldate = {2022-06-18}, @@ -9718,7 +10823,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--6}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01028-1}, url = {https://www.nature.com/articles/s41524-023-01028-1}, @@ -9739,7 +10844,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--6}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01028-1}, url = {https://www.nature.com/articles/s41524-023-01028-1}, @@ -9773,7 +10878,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {601}, number = {7892}, pages = {188--190}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, doi = {10.1038/d41586-022-00018-5}, url = {https://www.nature.com/articles/d41586-022-00018-5}, urldate = {2023-08-21}, @@ -9781,7 +10886,9 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {7892}, langid = {english}, keywords = {/unread,Institutions,Pasteur \& ISI,Research management,todo-tagging}, - annotation = {Bandiera\_abtest: a Cg\_type: Comment Subject\_term: Research management, Institutions}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Comment\\ +Subject\_term: Research management, Institutions}, file = {/Users/wasmer/Nextcloud/Zotero/Marblestone et al_2022_Unblock research bottlenecks with non-profit start-ups.pdf;/Users/wasmer/Zotero/storage/95Z9E64E/d41586-022-00018-5.html} } @@ -9794,7 +10901,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {1}, pages = {344}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-20471-y}, url = 
{https://www.nature.com/articles/s41467-020-20471-y}, @@ -9803,7 +10910,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {1}, langid = {english}, keywords = {AML,KDFA,library,long-range interaction,ML,ML-DFA,ML-DFT,ML-ESM,prediction of Exc}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational chemistry;Density functional theory;Method development;Molecular dynamics Subject\_term\_id: computational-chemistry;density-functional-theory;method-development;molecular-dynamics}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Computational chemistry;Density functional theory;Method development;Molecular dynamics\\ +Subject\_term\_id: computational-chemistry;density-functional-theory;method-development;molecular-dynamics}, file = {/Users/wasmer/Nextcloud/Zotero/Margraf_Reuter_2021_Pure non-local machine-learned density functional theory for electron.pdf;/Users/wasmer/Zotero/storage/RCFG2NBC/s41467-020-20471-y.html} } @@ -9824,20 +10936,38 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/LIPPS6I7/Margraf_2023_Science-Driven Atomistic Machine Learning.pdf;/Users/wasmer/Zotero/storage/V3VTFITJ/ange.html} } +@article{margrafScienceDrivenAtomisticMachine2023a, + title = {Science-{{Driven Atomistic Machine Learning}}}, + author = {Margraf, Johannes T.}, + date = {2023}, + journaltitle = {Angewandte Chemie International Edition}, + volume = {62}, + number = {26}, + pages = {e202219170}, + issn = {1521-3773}, + doi = {10.1002/anie.202219170}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/anie.202219170}, + urldate = {2024-04-07}, + abstract = {Machine learning (ML) algorithms are currently emerging as powerful tools in all areas of science. Conventionally, ML is understood as a fundamentally data-driven endeavour. Unfortunately, large well-curated databases are sparse in chemistry. In this contribution, I therefore review science-driven ML approaches which do not rely on “big data”, focusing on the atomistic modelling of materials and molecules. In this context, the term science-driven refers to approaches that begin with a scientific question and then ask what training data and model design choices are appropriate. As key features of science-driven ML, the automated and purpose-driven collection of data and the use of chemical and physical priors to achieve high data-efficiency are discussed. 
Furthermore, the importance of appropriate model evaluation and error estimation is emphasized.}, + langid = {english}, + keywords = {\_tablet,active learning,all-electron,AML,body-order,data-driven,database generation,delta learning,equivariant,inductive bias,iterative learning,iterative learning scheme,MACE,ML,ML-DFA,ML-DFT,ML-ESM,model evaluation,physical prior,physics-informed ML,prediction of electron density,review,review-of-AML,science-driven,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Margraf_2023_Science-Driven Atomistic Machine Learning.pdf;/Users/wasmer/Zotero/storage/5BDTDNS9/anie.html} +} + @book{martinElectronicStructureBasic2020, title = {Electronic {{Structure}}: {{Basic Theory}} and {{Practical Methods}}}, shorttitle = {Electronic {{Structure}}}, author = {Martin, Richard M.}, date = {2020}, edition = {2}, - publisher = {{Cambridge University Press}}, - location = {{Cambridge}}, + publisher = {Cambridge University Press}, + location = {Cambridge}, doi = {10.1017/9781108555586}, url = {https://www.cambridge.org/core/books/electronic-structure/ED0FF348536BFFE8899627C8F78FEE6A}, urldate = {2023-07-04}, abstract = {The study of electronic structure of materials is at a momentous stage, with new computational methods and advances in basic theory. Many properties of materials can be determined from the fundamental equations, and electronic structure theory is now an integral part of research in physics, chemistry, materials science and other fields. This book provides a unified exposition of the theory and methods, with emphasis on understanding each essential component. New in the second edition are recent advances in density functional theory, an introduction to Berry phases and topological insulators explained in terms of elementary band theory, and many new examples of applications. Graduate students and research scientists will find careful explanations with references to original papers, pertinent reviews, and accessible books. Each chapter includes a short list of the most relevant works and exercises that reveal salient points and challenge the reader.}, isbn = {978-1-108-42990-0}, - keywords = {\_tablet,/unread}, + keywords = {\_tablet}, file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Martin_2020_Electronic Structure.pdf;/Users/wasmer/Zotero/storage/PUPKBCZR/ED0FF348536BFFE8899627C8F78FEE6A.html} } @@ -9850,7 +10980,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {107}, number = {3}, pages = {035432}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.035432}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.035432}, urldate = {2023-09-20}, @@ -9871,17 +11001,35 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-09-20}, abstract = {We propose a method to determine the magnetic exchange interaction and on-site anisotropy tensors of extended Heisenberg spin models from density functional theory including relativistic effects. The method is based on the Liechtenstein-Katsnelson-Antropov-Gubanov torque formalism, whereby energy variations upon infinitesimal rotations are performed. We assume that the Kohn-Sham Hamiltonian is expanded in a non-orthogonal basis set of pseudo-atomic orbitals. We define local operators that are both hermitian and satisfy relevant sum rules. 
We demonstrate that in the presence of spin-orbit coupling a correct mapping from the density functional total energy to a spin model that relies on the rotation of the exchange field part of the Hamiltonian can not be accounted for by transforming the full Hamiltonian. We derive a set of sum rules that pose stringent validity tests on any specific calculation. We showcase the flexibility and accuracy of the method by computing the exchange and anisotropy tensors of both well-studied magnetic nanostructures and of recently synthesized two-dimensional magnets. Specifically, we benchmark our approach against the established Korringa-Kohn-Rostoker Green's function method and show that they agree well. Finally, we demonstrate how the application of biaxial strain on the two-dimensional magnet T-CrTe2 can trigger a magnetic phase transition.}, pubstate = {preprint}, - keywords = {/unread,Budapest KKR group,DFT,GF2023 workshop,Jij,SIESTA,todo-tagging}, + keywords = {Budapest KKR group,DFT,GF2023 workshop,Jij,SIESTA,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/MartÃnez-Carracedo et al_2023_Relativistic magnetic interactions from non-orthogonal basis sets.pdf;/Users/wasmer/Zotero/storage/8KP4SZA4/2309.html} } +@article{martinez-carracedoRelativisticMagneticInteractions2023a, + title = {Relativistic Magnetic Interactions from Nonorthogonal Basis Sets}, + author = {MartÃnez-Carracedo, Gabriel and Oroszlány, László and GarcÃa-Fuente, Amador and Nyári, Bendegúz and Udvardi, László and Szunyogh, László and Ferrer, Jaime}, + date = {2023-12-18}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {108}, + number = {21}, + pages = {214418}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.108.214418}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.108.214418}, + urldate = {2024-01-01}, + abstract = {We propose a method to determine the magnetic exchange interaction and onsite anisotropy tensors of extended Heisenberg spin models from density functional theory including relativistic effects. The method is based on the Liechtenstein-Katsnelson-Antropov-Gubanov torque formalism, whereby energy variations upon infinitesimal rotations are performed. We assume that the Kohn-Sham Hamiltonian is expanded in a nonorthogonal basis set of pseudoatomic orbitals. We define local operators that are both Hermitian and satisfy relevant sum rules. We demonstrate that in the presence of spin-orbit coupling a correct mapping from the density functional total energy to a spin model that relies on the rotation of the exchange field part of the Hamiltonian can not be accounted for by transforming the full Hamiltonian. We derive a set of sum rules that pose stringent validity tests on any specific calculation. We showcase the flexibility and accuracy of the method by computing the exchange and anisotropy tensors of both well-studied magnetic nanostructures and of recently synthesized two-dimensional magnets. Specifically, we benchmark our approach against the established Korringa-Kohn-Rostoker Green's function method and show that they agree well. 
Finally, we demonstrate how the application of biaxial strain on the two-dimensional magnet T−CrTe2 can trigger a magnetic phase transition.}, + keywords = {/unread,Budapest KKR group,DFT,GF2023 workshop,Jij,SIESTA,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Martínez-Carracedo et al_2023_Relativistic magnetic interactions from nonorthogonal basis sets.pdf;/Users/wasmer/Zotero/storage/EG37HM8T/PhysRevB.108.html} +} + @book{martinInteractingElectronsTheory2016, title = {Interacting {{Electrons}}: {{Theory}} and {{Computational Approaches}}}, shorttitle = {Interacting {{Electrons}}}, author = {Martin, Richard M. and Reining, Lucia and Ceperley, David M.}, date = {2016}, - publisher = {{Cambridge University Press}}, - location = {{Cambridge}}, + publisher = {Cambridge University Press}, + location = {Cambridge}, doi = {10.1017/CBO9781139050807}, url = {https://www.cambridge.org/core/books/interacting-electrons/4317C43D0531C900920E83DD4632CFE9}, urldate = {2023-07-04}, @@ -9907,7 +11055,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {20}, number = {6}, pages = {736--749}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/s41563-021-01013-3}, url = {https://www.nature.com/articles/s41563-021-01013-3}, @@ -9915,7 +11063,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {The accuracy and efficiency of electronic-structure methods to understand, predict and design the properties of materials has driven a new paradigm in research. Simulations can greatly accelerate the identification, characterization and optimization of materials, with this acceleration driven by continuous progress in theory, algorithms and hardware, and by adaptation of concepts and tools from computer science. Nevertheless, the capability to identify and characterize materials relies on the predictive accuracy of the underlying physical descriptions, and on the ability to capture the complexity of realistic systems. We provide here an overview of electronic-structure methods, of their application to the prediction of materials properties, and of the different strategies employed towards the broader goals of materials design and discovery.}, issue = {6}, langid = {english}, - keywords = {database generation,DFT,Electronic structure,ESM,magnetism,ML,review}, + keywords = {database generation,DFT,Electronic structure,ESM,experimental science,for abstracts,for introductions,magnetism,Magnetism,materials properties,ML,reference,review,review-of-DFT}, file = {/Users/wasmer/Nextcloud/Zotero/Marzari et al_2021_Electronic-structure methods for materials design.pdf;/Users/wasmer/Zotero/storage/AKF7QEMC/s41563-021-01013-3.html} } @@ -9941,7 +11089,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-10-13}, abstract = {The study of the electronic properties of charged defects is crucial for our understanding of various electrical properties of materials. However, the high computational cost of density functional theory (DFT) hinders the research on large defect models. In this study, we present an E(3) equivariant graph neural network framework (HamGNN-Q), which can predict the tight-binding Hamiltonian matrices for various defect types with different charges using only one set of network parameters. 
By incorporating features of background charge into the element representation, HamGNN-Q enables a direct mapping from structure and background charge to the electronic Hamiltonian matrix of charged defect systems without DFT calculation. We demonstrate the model's high precision and transferability through testing on GaAs systems with various charged defect configurations. Our approach provides a practical solution for accelerating charged defect electronic structure calculations and advancing the design of materials with tailored electronic properties.}, pubstate = {preprint}, - keywords = {AML,defects,disordered,ML,ML-DFT,ML-ESM,point defects,prediction of Hamiltonian matrix}, + keywords = {\_tablet,AML,defects,disordered,HamGNN,ML,ML-DFT,ML-ESM,MPNN,point defects,prediction of Hamiltonian matrix}, file = {/Users/wasmer/Nextcloud/Zotero/Ma et al_2023_Transferable Machine Learning Approach for Predicting Electronic Structures of.pdf;/Users/wasmer/Zotero/storage/TICQMBV5/2306.html} } @@ -9953,8 +11101,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, series = {{{NIC}} Series}, volume = {31}, pages = {131--158}, - publisher = {{NIC-Secretariat, Research Centre Jülich}}, - location = {{Jülich}}, + publisher = {NIC-Secretariat, Research Centre Jülich}, + location = {Jülich}, url = {http://hdl.handle.net/2128/4777}, urldate = {2021-06-28}, abstract = {The Korringa-Kohn-Rostoker (KKR) method for the calculation of the electronic structure of materials is founded on the concepts of the Green function and of multiple-scattering. In this manuscript, after a short introduction to Green functions, we present a description of single-site scattering (including anisotropic potentials) and multiple-scattering theory and the KKR equations. The KKR representation of the Green function and the algebraic Dyson equation are introduced. We then discuss the screened KKR formalism, and its advantages in the numerical effort for the calculation of layered systems. Finally we give a summary of the self-consistency algorithm for the calculation of the electronic structure.}, @@ -9974,7 +11122,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {18}, number = {4}, pages = {367--368}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/s41567-022-01575-2}, url = {https://www.nature.com/articles/s41567-022-01575-2}, @@ -9986,6 +11134,25 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Mazin_2022_Inverse Occam’s razor.pdf} } +@article{mccardlePredictingElectronicStructure2023, + title = {Predicting Electronic Structure Calculation Results}, + author = {McCardle, Kaitlin}, + date = {2023-11}, + journaltitle = {Nature Computational Science}, + shortjournal = {Nat Comput Sci}, + volume = {3}, + number = {11}, + pages = {915--915}, + publisher = {Nature Publishing Group}, + issn = {2662-8457}, + doi = {10.1038/s43588-023-00562-8}, + url = {https://www.nature.com/articles/s43588-023-00562-8}, + urldate = {2024-03-08}, + langid = {english}, + keywords = {\_tablet,AML,density matrix,editorial highlight,ML,ML-DFT,ML-ESM,ML-WFT,prediction of density matrix,prediction of electron density}, + file = {/Users/wasmer/Nextcloud/Zotero/McCardle_2023_Predicting electronic structure calculation results.pdf} +} + @article{medvedevDensityFunctionalTheory2017, title = {Density Functional Theory Is Straying from the Path toward the Exact Functional}, author = {Medvedev, Michael G. and Bushmarinov, Ivan S. 
and Sun, Jianwei and Perdew, John P. and Lyssenko, Konstantin A.}, @@ -9994,7 +11161,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {355}, number = {6320}, pages = {49--52}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.aah5975}, url = {https://www.science.org/doi/10.1126/science.aah5975}, urldate = {2021-11-17}, @@ -10020,6 +11187,48 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Mehta et al_2019_A high-bias, low-variance introduction to Machine Learning for physicists.pdf} } +@article{melkoLanguageModelsQuantum2024, + title = {Language Models for Quantum Simulation}, + author = {Melko, Roger G. and Carrasquilla, Juan}, + date = {2024-01}, + journaltitle = {Nature Computational Science}, + shortjournal = {Nat Comput Sci}, + volume = {4}, + number = {1}, + pages = {11--18}, + publisher = {Nature Publishing Group}, + issn = {2662-8457}, + doi = {10.1038/s43588-023-00578-0}, + url = {https://www.nature.com/articles/s43588-023-00578-0}, + urldate = {2024-02-28}, + abstract = {A key challenge in the effort to simulate today’s quantum computing devices is the ability to learn and encode the complex correlations that occur between qubits. Emerging technologies based on language models adopted from machine learning have shown unique abilities to learn quantum states. We highlight the contributions that language models are making in the effort to build quantum computers and discuss their future role in the race to quantum advantage.}, + issue = {1}, + langid = {english}, + keywords = {/unread,GPT,language models,LLM,Quantum simulation,RNN,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Melko_Carrasquilla_2024_Language models for quantum simulation.pdf} +} + +@article{melkoRestrictedBoltzmannMachines2019, + title = {Restricted {{Boltzmann}} Machines in Quantum Physics}, + author = {Melko, Roger G. and Carleo, Giuseppe and Carrasquilla, Juan and Cirac, J. Ignacio}, + date = {2019-09}, + journaltitle = {Nature Physics}, + shortjournal = {Nat. Phys.}, + volume = {15}, + number = {9}, + pages = {887--892}, + publisher = {Nature Publishing Group}, + issn = {1745-2481}, + doi = {10.1038/s41567-019-0545-1}, + url = {https://www.nature.com/articles/s41567-019-0545-1}, + urldate = {2024-02-28}, + abstract = {A type of stochastic neural network called a restricted Boltzmann machine has been widely used in artificial intelligence applications for decades. They are now finding new life in the simulation of complex wavefunctions in quantum many-body physics.}, + issue = {9}, + langid = {english}, + keywords = {/unread,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Melko et al_2019_Restricted Boltzmann machines in quantum physics.pdf} +} + @online{mellorNeuralArchitectureSearch2021, title = {Neural {{Architecture Search}} without {{Training}}}, author = {Mellor, Joseph and Turner, Jack and Storkey, Amos and Crowley, Elliot J.}, @@ -10037,6 +11246,23 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Mellor et al_2021_Neural Architecture Search without Training.pdf;/Users/wasmer/Zotero/storage/NKM9IWFY/2006.html} } +@article{merchantScalingDeepLearning2023, + title = {Scaling Deep Learning for Materials Discovery}, + author = {Merchant, Amil and Batzner, Simon and Schoenholz, Samuel S. 
and Aykol, Muratahan and Cheon, Gowoon and Cubuk, Ekin Dogus}, + date = {2023-11-29}, + journaltitle = {Nature}, + pages = {1--6}, + publisher = {Nature Publishing Group}, + issn = {1476-4687}, + doi = {10.1038/s41586-023-06735-9}, + url = {https://www.nature.com/articles/s41586-023-06735-9}, + urldate = {2023-12-05}, + abstract = {Novel functional materials enable fundamental breakthroughs across technological applications from clean energy to information processing1–11. From microchips to batteries and photovoltaics, discovery of inorganic crystals has been bottlenecked by expensive trial-and-error approaches. Concurrently, deep-learning models for language, vision and biology have showcased emergent predictive capabilities with increasing data and computation12–14. Here we show that graph networks trained at scale can reach unprecedented levels of generalization, improving the efficiency of materials discovery by an order of magnitude. Building on 48,000 stable crystals identified in continuing studies15–17, improved efficiency enables the discovery of 2.2 million structures below the current convex hull, many of which escaped previous human chemical intuition. Our work represents an order-of-magnitude expansion in stable materials known to humanity. Stable discoveries that are on the final convex hull will be made available to screen for technological applications, as we demonstrate for layered materials and solid-electrolyte candidates. Of the stable structures, 736 have already been independently experimentally realized. The scale and diversity of hundreds of millions of first-principles calculations also unlock modelling capabilities for downstream applications, leading in particular to highly accurate and robust learned interatomic potentials that can be used in condensed-phase molecular-dynamics simulations and high-fidelity zero-shot prediction of ionic conductivity.}, + langid = {english}, + keywords = {active learning,AML,convex hull,database generation,deep ensembles,DeepMind,DFT,ensemble learning,generalization,GNN,GNoME,Google,hybrid AI/simulation,library,M3GNet,materials,materials database,materials discovery,materials project,ML,MLP,NequIP,OQMD,out-of-distribution,scaling law,structure prediction,symmetry,universal potential,VASP,with-code,zero-shot learning}, + file = {/Users/wasmer/Zotero/storage/MBZNUHBX/Merchant et al. - 2023 - Scaling deep learning for materials discovery.pdf} +} + @article{meredigCanMachineLearning2018, title = {Can Machine Learning Identify the next High-Temperature Superconductor? 
{{Examining}} Extrapolation Performance for Materials Discovery}, shorttitle = {Can Machine Learning Identify the next High-Temperature Superconductor?}, @@ -10047,7 +11273,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {5}, pages = {819--825}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2058-9689}, doi = {10.1039/C8ME00012C}, url = {https://pubs.rsc.org/en/content/articlelanding/2018/me/c8me00012c}, @@ -10066,7 +11292,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {31}, number = {23}, pages = {9579--9581}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.9b04078}, url = {https://doi.org/10.1021/acs.chemmater.9b04078}, @@ -10139,7 +11365,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {107}, number = {24}, pages = {245423}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.245423}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.245423}, urldate = {2023-06-26}, @@ -10171,8 +11397,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2013-03-18}, series = {{{EDBT}} '13}, pages = {773--776}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/2452376.2452478}, url = {https://doi.org/10.1145/2452376.2452478}, urldate = {2021-10-17}, @@ -10205,8 +11431,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2018}, series = {Adaptive Computation and Machine Learning}, edition = {Second edition}, - publisher = {{The MIT Press}}, - location = {{Cambridge, Massachusetts}}, + publisher = {The MIT Press}, + location = {Cambridge, Massachusetts}, url = {https://cs.nyu.edu/~mohri/mlbook/}, isbn = {978-0-262-03940-6}, pagetotal = {486}, @@ -10222,12 +11448,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {14}, number = {5}, pages = {1326--1333}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.2c03670}, url = {https://doi.org/10.1021/acs.jpclett.2c03670}, urldate = {2023-04-04}, abstract = {We present an analysis of the static exchange-correlation (XC) kernel computed from hybrid functionals with a single mixing coefficient such as PBE0 and PBE0–1/3. We break down the hybrid XC kernels into the exchange and correlation parts using the Hartree–Fock functional, the exchange-only PBE, and the correlation-only PBE. This decomposition is combined with exact data for the static XC kernel of the uniform electron gas and an Airy gas model within a subsystem functional approach. This gives us a tool for the non-empirical choice of the mixing coefficient under ambient and extreme conditions. Our analysis provides physical insights into the effect of the variation of the mixing coefficient in hybrid functionals, which is of immense practical value. 
The presented approach is general and can be used for other types of functionals like screened hybrids.}, - keywords = {\_tablet,/unread,CASUS,DFA,DFT,HZDR,PGI-1/IAS-1}, + keywords = {/unread,\_tablet,CASUS,DFA,DFT,HZDR,PGI-1/IAS-1}, file = {/Users/wasmer/Nextcloud/Zotero/Moldabekov et al_2023_Non-empirical Mixing Coefficient for Hybrid XC Functionals from Analysis of the.pdf;/Users/wasmer/Zotero/storage/WGXJ5PMF/acs.jpclett.html} } @@ -10250,7 +11476,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {75}, number = {12}, pages = {121306}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.75.121306}, url = {https://link.aps.org/doi/10.1103/PhysRevB.75.121306}, urldate = {2023-07-12}, @@ -10268,7 +11494,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {117}, number = {32}, pages = {7356--7366}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1089-5639}, doi = {10.1021/jp401225b}, url = {https://doi.org/10.1021/jp401225b}, @@ -10287,7 +11513,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {125}, number = {7}, pages = {076402}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.125.076402}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.125.076402}, urldate = {2021-12-14}, @@ -10389,7 +11615,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {9}, number = {1}, pages = {1--11}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-00973-1}, url = {https://www.nature.com/articles/s41524-023-00973-1}, @@ -10407,7 +11633,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Mosquera-Lois, Irea and R.~Kavanagh, Seán and Klarbring, Johan and Tolborg, Kasper and Walsh, Aron}, date = {2023}, journaltitle = {Chemical Society Reviews}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/D3CS00432E}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/cs/d3cs00432e}, urldate = {2023-08-25}, @@ -10440,7 +11666,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Design of Magnetic Interactions in Doped Topological Insulators}, author = {Mozumder, Rubel and Rüßmann, Philipp and Blügel, Stefan}, date = {2022-04-12}, - institution = {{Heinrich Heine University Düsseldorf}}, + institution = {Heinrich Heine University Düsseldorf}, abstract = {Magnetic impurities and their long-range interaction (ferromagnetic) order play pivotal roles in the topological phase transition from QSHI to QAHI. This transition transforms helical edge states belonging to the QSHI for 2D TIs (surface states for 3D TI) to the chiral edge states in QAHI for 2D TIs (surface states for 3D TI). Due to such chiral states, the QAHIs forbid back-scattering in electron conducting channels, which in turn provide dissipationless current and increase energy efficiency for conducting channels. The chiral states consist of single spin electrons which provide spin currents from conventional charge currents. Regarding the properties of QAHIs, the QAHIs opens a new venue for low-energy electronics, spintronics and quantum computation [9]. Independently, the V- [10] and Cr-doped [11] as well as their co-doping [12] (Sb, Bi)2 Te3 shows stable QAHE but with very low temperatures (≤ 0.5K).
In this high throughput ab-initio work, we will investigate other possible co-doping, dimer calculations, from the d-block elements in 3D TI Bi2Te3. For this purpose, we have extended AiiDA-KKR plugins by developing combine-impurity workflow called combine imps wc using GF formulation of DFT code (KKR-GF method) and the new workflow is capable to run multi-impurity calculations. Here, the dimer calculations are in the main focus, and from the calculation results we will analyze Heisenberg isotropic collinear interaction (Jij), Dzyaloshinskii–Moriya interaction (DMI, Dij), and their ratio for each possible impurity couple. Finally, using the obtained Jij data we have implemented some linear regression machine learning tools to understand better the dependency of Jij on some well-known factors e.g. inter-impurity distance, electronegativity. Our results from the notion of this work will give a list of some potential impurities and after their potential impurity combinations for stable QAHE. It will also render an impression of implementation of machine learning approach for designing better magnetic interactions in TIs.}, langid = {english}, pagetotal = {85}, @@ -10490,7 +11716,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {99}, number = {22}, pages = {224414}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.99.224414}, url = {https://link.aps.org/doi/10.1103/PhysRevB.99.224414}, urldate = {2023-10-26}, @@ -10499,22 +11725,11 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Müller et al_2019_Spirit.pdf;/Users/wasmer/Zotero/storage/N8F8Z9GD/PhysRevB.99.html} } -@book{MultipleScatteringTheory, - title = {Multiple {{Scattering Theory}}}, - doi = {10.1088/2053-2563/aae7d8}, - url = {https://iopscience.iop.org/book/978-0-7503-1490-9}, - urldate = {2021-12-02}, - isbn = {978-0-7503-1490-9}, - langid = {english}, - keywords = {\_tablet}, - file = {/Users/wasmer/Nextcloud/Zotero/Multiple Scattering Theory.pdf;/Users/wasmer/Zotero/storage/UYLUXULV/978-0-7503-1490-9.html} -} - @book{murphyProbabilisticMachineLearning2022, title = {Probabilistic {{Machine Learning}}: {{An}} Introduction}, author = {Murphy, Kevin P.}, date = {2022}, - publisher = {{MIT Press}}, + publisher = {MIT Press}, url = {probml.ai}, keywords = {/unread,educational,General ML,learning material,ML,ML theory,online book,textbook} } @@ -10542,7 +11757,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {14}, number = {1}, pages = {579}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-023-36329-y}, url = {https://www.nature.com/articles/s41467-023-36329-y}, @@ -10550,7 +11765,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {A simultaneously accurate and computationally efficient parametrization of the potential energy surface of molecules and materials is a long-standing goal in the natural sciences. While atom-centered message passing neural networks (MPNNs) have shown remarkable accuracy, their information propagation has limited the accessible length-scales. Local methods, conversely, scale to large simulations but have suffered from inferior accuracy. This work introduces Allegro, a strictly local equivariant deep neural network interatomic potential architecture that simultaneously exhibits excellent accuracy and scalability.
Allegro represents a many-body potential using iterated tensor products of learned equivariant representations without atom-centered message passing. Allegro obtains improvements over state-of-the-art methods on QM9 and revMD17. A single tensor product layer outperforms existing deep MPNNs and transformers on QM9. Furthermore, Allegro displays remarkable generalization to out-of-distribution data. Molecular simulations using Allegro recover structural and kinetic properties of an amorphous electrolyte in excellent agreement with ab-initio simulations. Finally, we demonstrate parallelization with a simulation of 100 million atoms.}, issue = {1}, langid = {english}, - keywords = {/unread,Allegro,AML,chemical species scaling problem,equivariant,local,MD,MD17,ML,MLP,MPNN,NequIP,original publication,parallelization,prediction of total energy,QM9,scaling,SE(3)}, + keywords = {Allegro,AML,chemical species scaling problem,equivariant,local,MD,MD17,ML,MLP,MPNN,NequIP,original publication,parallelization,prediction of total energy,QM9,scaling,SE(3)}, file = {/Users/wasmer/Zotero/storage/INEV8259/Musaelian et al. - 2023 - Learning local equivariant representations for lar.pdf} } @@ -10560,7 +11775,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, editor = {Musaelian, Albert}, editortype = {director}, date = {2023-06-13}, - location = {{online}}, + location = {online}, url = {https://m2d2.io/talks/m2d2/learning-local-equivariant-representations-for-large-scale-atomistic-dynamics/}, urldate = {2023-09-05}, abstract = {Trade-offs between accuracy and speed have long limited the applications of machine learning interatomic potentials. Recently, E(3)-equivariant architectures have demonstrated leading accuracy, data efficiency, transferability, and simulation stability, but their computational cost and scaling has generally reinforced this trade-off. In particular, the ubiquitous use of message passing architectures has precluded the extension of accessible length- and time-scales with efficient multi-GPU calculations. In this talk I will discuss Allegro, a strictly local equivariant deep learning interatomic potential designed for parallel scalability and increased computational efficiency that simultaneously exhibits excellent accuracy. After presenting the architecture, I will discuss applications and benchmarks on various materials and chemical systems, including recent demonstrations of scaling to large all-atom biomolecular systems such as solvated proteins and a 44 million atom model of the HIV capsid. Finally, I will summarize the software ecosystem and tooling around Allegro.}, @@ -10593,7 +11808,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {154}, number = {11}, pages = {114109}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0044689}, url = {https://aip.scitation.org/doi/10.1063/5.0044689}, @@ -10612,7 +11827,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {15}, number = {2}, pages = {906--915}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.8b00959}, url = {https://doi.org/10.1021/acs.jctc.8b00959}, @@ -10647,7 +11862,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2021-07-26}, journaltitle = {Chemical Reviews}, shortjournal = {Chem. 
Rev.}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.1c00021}, url = {https://doi.org/10.1021/acs.chemrev.1c00021}, @@ -10678,7 +11893,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {https://github.com/mstsuite/MuST}, urldate = {2023-09-19}, abstract = {Multiple Scattering Theory code for first principles calculations}, - organization = {{ORNL}}, + organization = {ORNL}, keywords = {/unread,DFT,KKR,library,Multiple scattering theory} } @@ -10692,7 +11907,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {2}, pages = {024009}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2516-1075}, doi = {10.1088/2516-1075/ace014}, url = {https://dx.doi.org/10.1088/2516-1075/ace014}, @@ -10711,7 +11926,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {346}, number = {6209}, pages = {602--607}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.1259327}, url = {https://www.science.org/doi/10.1126/science.1259327}, urldate = {2023-05-10}, @@ -10729,7 +11944,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {12}, pages = {899--911}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1748-3395}, doi = {10.1038/nnano.2013.243}, url = {https://www.nature.com/articles/nnano.2013.243}, @@ -10737,7 +11952,11 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {This Review covers the recent developments in the observation and modelling of magnetic skyrmions, including their topological properties, current-induced dynamics and potential in future information storage devices.}, issue = {12}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Magnetic properties and materials Subject\_term\_id: magnetic-properties-and-materials}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Magnetic properties and materials\\ +Subject\_term\_id: magnetic-properties-and-materials}, file = {/Users/wasmer/Nextcloud/Zotero/Nagaosa_Tokura_2013_Topological properties and dynamics of magnetic skyrmions.pdf} } @@ -10769,7 +11988,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {62}, number = {19}, pages = {4727--4735}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9596}, doi = {10.1021/acs.jcim.2c00760}, url = {https://doi.org/10.1021/acs.jcim.2c00760}, @@ -10786,7 +12005,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Nature Biotechnology}, shortjournal = {Nat Biotechnol}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1546-1696}, doi = {10.1038/s41587-020-00801-7}, url = {https://www.nature.com/articles/s41587-020-00801-7}, @@ -10803,7 +12022,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2020-05-14}, journaltitle = {bioRxiv}, pages = {2020.05.12.077776}, - publisher = {{Cold Spring Harbor Laboratory}}, + publisher = {Cold Spring Harbor Laboratory}, doi = {10.1101/2020.05.12.077776}, url = {https://www.biorxiv.org/content/10.1101/2020.05.12.077776v1}, urldate = {2021-05-15}, @@ -10820,13 +12039,13 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-07-12}, abstract = {2016 Nobel Prize in Physics}, 
langid = {english}, - organization = {{Nature}}, + organization = {Nature}, keywords = {/unread,collection,history of science,Nobel prize,physics,popular science,quantum materials,topological insulator,Topological matter}, file = {/Users/wasmer/Zotero/storage/LSJ66I93/fwsytynlwg.html} } @online{neklyudovWassersteinQuantumMonte2023, - title = {Wasserstein {{Quantum Monte Carlo}}: {{A Novel Approach}} for {{Solving}} the {{Quantum Many-Body Schr}}\textbackslash "odinger {{Equation}}}, + title = {Wasserstein {{Quantum Monte Carlo}}: {{A Novel Approach}} for {{Solving}} the {{Quantum Many-Body Schrödinger Equation}}}, shorttitle = {Wasserstein {{Quantum Monte Carlo}}}, author = {Neklyudov, Kirill and Nys, Jannes and Thiede, Luca and Carrasquilla, Juan and Liu, Qiang and Welling, Max and Makhzani, Alireza}, date = {2023-07-16}, @@ -10838,7 +12057,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-08-22}, abstract = {Solving the quantum many-body Schr\textbackslash "odinger equation is a fundamental and challenging problem in the fields of quantum physics, quantum chemistry, and material sciences. One of the common computational approaches to this problem is Quantum Variational Monte Carlo (QVMC), in which ground-state solutions are obtained by minimizing the energy of the system within a restricted family of parameterized wave functions. Deep learning methods partially address the limitations of traditional QVMC by representing a rich family of wave functions in terms of neural networks. However, the optimization objective in QVMC remains notoriously hard to minimize and requires second-order optimization methods such as natural gradient. In this paper, we first reformulate energy functional minimization in the space of Born distributions corresponding to particle-permutation (anti-)symmetric wave functions, rather than the space of wave functions. We then interpret QVMC as the Fisher-Rao gradient flow in this distributional space, followed by a projection step onto the variational manifold. This perspective provides us with a principled framework to derive new QMC algorithms, by endowing the distributional space with better metrics, and following the projected gradient flow induced by those metrics. More specifically, we propose "Wasserstein Quantum Monte Carlo" (WQMC), which uses the gradient flow induced by the Wasserstein metric, rather than Fisher-Rao metric, and corresponds to transporting the probability mass, rather than teleporting it. We demonstrate empirically that the dynamics of WQMC results in faster convergence to the ground state of molecular systems.}, pubstate = {preprint}, - keywords = {/unread,todo-tagging}, + keywords = {Microsoft Research,ML,ML-QM,ML-QMBP,ML-WFT,prediction of wavefunction,QMC,todo-tagging,VMC}, file = {/Users/wasmer/Nextcloud/Zotero/Neklyudov et al_2023_Wasserstein Quantum Monte Carlo.pdf;/Users/wasmer/Zotero/storage/5BUA924B/2307.html} } @@ -10859,7 +12078,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {http://arxiv.org/abs/2201.11647}, urldate = {2023-02-23}, abstract = {We investigate the potential of supervised machine learning to propagate a quantum system in time. While Markovian dynamics can be learned easily, given a sufficient amount of data, non-Markovian systems are non-trivial and their description requires the memory knowledge of past states. 
Here we analyse the feature of such memory by taking a simple 1D Heisenberg model as many-body Hamiltonian, and construct a non-Markovian description by representing the system over the single-particle reduced density matrix. The number of past states required for this representation to reproduce the time-dependent dynamics is found to grow exponentially with the number of spins and with the density of the system spectrum. Most importantly, we demonstrate that neural networks can work as time propagators at any time in the future and that they can be concatenated in time forming an autoregression. Such neural-network autoregression can be used to generate long-time and arbitrary dense time trajectories. Finally, we investigate the time resolution needed to represent the system memory. We find two regimes: for fine memory samplings the memory needed remains constant, while longer memories are required for coarse samplings, although the total number of time steps remains constant. The boundary between these two regimes is set by the period corresponding to the highest frequency in the system spectrum, demonstrating that neural network can overcome the limitation set by the Shannon-Nyquist sampling theorem.}, - keywords = {\_tablet,/unread,Condensed Matter - Mesoscale and Nanoscale Physics,Condensed Matter - Strongly Correlated Electrons,Quantum Physics}, + keywords = {/unread,\_tablet,Condensed Matter - Mesoscale and Nanoscale Physics,Condensed Matter - Strongly Correlated Electrons,Quantum Physics}, file = {/Users/wasmer/Nextcloud/Zotero/Nelson et al_2022_Data-Driven Time Propagation of Quantum Systems with Neural Networks.pdf;/Users/wasmer/Zotero/storage/N33SL7SM/2201.html} } @@ -10872,7 +12091,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {99}, number = {7}, pages = {075132}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.99.075132}, url = {https://link.aps.org/doi/10.1103/PhysRevB.99.075132}, urldate = {2023-03-09}, @@ -10919,7 +12138,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {http://arxiv.org/abs/1906.08534}, urldate = {2023-02-23}, abstract = {The magnetic properties of a material are determined by a subtle balance between the various interactions at play, a fact that makes the design of new magnets a daunting task. High-throughput electronic structure theory may help to explore the vast chemical space available and offers a design tool to the experimental synthesis. This method efficiently predicts the elementary magnetic properties of a compound and its thermodynamical stability, but it is blind to information concerning the magnetic critical temperature. Here we introduce a range of machine-learning models to predict the Curie temperature, \$T\_\textbackslash mathrm\{C\}\$, of ferromagnets. The models are constructed by using experimental data for about 2,500 known magnets and consider the chemical composition of a compound as the only feature determining \$T\_\textbackslash mathrm\{C\}\$. Thus, we are able to establish a one-to-one relation between the chemical composition and the critical temperature. We show that the best model can predict \$T\_\textbackslash mathrm\{C\}\$'s with an accuracy of about 50K. Most importantly our model is able to extrapolate the predictions to regions of the chemical space, where only a little fraction of the data was considered for training. 
This is demonstrated by tracing the \$T\_\textbackslash mathrm\{C\}\$ of binary intermetallic alloys along their composition space and for the Al-Co-Fe ternary system.}, - keywords = {\_tablet,/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + keywords = {/unread,\_tablet,Condensed Matter - Materials Science,Physics - Computational Physics}, file = {/Users/wasmer/Nextcloud/Zotero/Nelson_Sanvito_2019_Predicting the Curie temperature of ferromagnets using machine learning.pdf;/Users/wasmer/Zotero/storage/J4ASXLIA/1906.html} } @@ -10932,7 +12151,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {96}, number = {6}, pages = {065807}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {1402-4896}, doi = {10.1088/1402-4896/abf3f7}, url = {https://doi.org/10.1088/1402-4896/abf3f7}, @@ -10963,12 +12182,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {107}, number = {14}, pages = {144103}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.144103}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.144103}, urldate = {2023-06-30}, abstract = {The development of differentiable invariant descriptors for accurate representations of atomic environments plays a central role in the success of interatomic potentials for chemistry and materials science. We introduce a method to generate fast proper orthogonal descriptors for the construction of many-body interatomic potentials, and we discuss its relation to existing empirical and machine-learning interatomic potentials. A traditional way of implementing the proper orthogonal descriptors has a computational complexity that scales exponentially with the body order in terms of the number of neighbors. We present an algorithm to compute the proper orthogonal descriptors with a computational complexity that scales linearly with the number of neighbors irrespective of the body order. We show that our method can enable a more efficient implementation for a number of existing potentials, and we provide a scalable systematic framework to construct new many-body potentials. 
The new potentials are demonstrated on a data set of density functional theory calculations for tantalum and compared with other interatomic potentials.}, - keywords = {ACSF,AML,bispectrum,descriptor comparison,descriptors,internal coordinate descriptor,kernel methods,linear regression,ML,MLP,MTP,nonlinear regression,POD descriptor,SNAP,SOAP}, + keywords = {ACE,ACE-related,ACSF,AML,benchmarking,bispectrum,descriptor comparison,descriptors,internal coordinate descriptor,kernel methods,linear regression,ML,MLP,MLP comparison,MTP,nonlinear regression,POD descriptor,SNAP,SOAP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_2023_Fast proper orthogonal descriptors for many-body interatomic potentials.pdf;/Users/wasmer/Zotero/storage/3337UHFR/PhysRevB.107.html} } @@ -10981,7 +12200,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {105}, number = {16}, pages = {165131}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.105.165131}, url = {https://link.aps.org/doi/10.1103/PhysRevB.105.165131}, urldate = {2023-10-13}, @@ -10992,7 +12211,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, @article{nguyenProperOrthogonalDescriptors2023, title = {Proper Orthogonal Descriptors for Efficient and Accurate Interatomic Potentials}, author = {Nguyen, Ngoc Cuong and Rohskopf, Andrew}, - date = {2023-05-01}, + date = {2023-03-02}, journaltitle = {Journal of Computational Physics}, shortjournal = {Journal of Computational Physics}, volume = {480}, @@ -11003,8 +12222,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-07-01}, abstract = {We present the proper orthogonal descriptors for efficient and accuracy representation of the potential energy surface. The potential energy surface is represented as a many-body expansion of parametrized potentials in which the potentials are functions of atom positions and parameters. The proper orthogonal decomposition is employed to decompose the parametrized potentials into a set of proper orthogonal descriptors (PODs). Because of the rapid convergence of the proper orthogonal decomposition, relevant snapshots can be sampled exhaustively to represent the atomic neighborhood environment accurately with a small number of descriptors. The proper orthogonal descriptors are used to develop interatomic potentials by using a linear expansion of the descriptors and determining the expansion coefficients from a weighted least-squares regression against a density functional theory (DFT) training set. We present a comprehensive evaluation of the POD potentials on previously published DFT data sets comprising Li, Mo, Cu, Ni, Si, Ge, and Ta elements. The data sets represent a diverse pool of metals, transition metals, and semiconductors. 
The accuracy of the POD potentials are comparable to that of state-of-the-art machine learning potentials such as the spectral neighbor analysis potential (SNAP) and the atomic cluster expansion (ACE).}, langid = {english}, - keywords = {ACE,ACSF,AML,benchmarking,bispectrum,descriptor comparison,descriptors,Julia,kernel methods,library,materials,ML,MLP,original publication,POD descriptor,SNAP,SOAP,transition metals,with-code}, - file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_Rohskopf_2023_Proper orthogonal descriptors for efficient and accurate interatomic potentials.pdf;/Users/wasmer/Zotero/storage/PYJFVV5U/S0021999123001250.html} + keywords = {ACE,ACE-related,ACSF,AML,benchmarking,bispectrum,descriptor comparison,descriptors,Julia,kernel methods,library,materials,ML,MLP,MLP comparison,original publication,POD descriptor,SNAP,SOAP,transition metals,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_Rohskopf_2023_Proper orthogonal descriptors for efficient and accurate interatomic potentials2.pdf;/Users/wasmer/Zotero/storage/PYJFVV5U/S0021999123001250.html} } @online{nigamCompletenessAtomicStructure2023, @@ -11032,7 +12251,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {156}, number = {1}, pages = {014115}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0072784}, url = {https://aip.scitation.org/doi/full/10.1063/5.0072784}, @@ -11066,15 +12285,29 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {106}, number = {23}, pages = {235114}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.106.235114}, url = {https://link.aps.org/doi/10.1103/PhysRevB.106.235114}, urldate = {2023-04-04}, abstract = {An efficient and accurate generalization of the removed-sphere method (RSM) to solve the Poisson equation for total charge density in a solid with space-filling convex Voronoi polyhedra (VPs) and any symmetry is presented. The generalized RSM avoids the use of multipoles and VP shape functions for cellular integrals, which have associated ill-convergent large, double-internal L sums in spherical-harmonic expansions, so that fast convergence in single-L sums is reached. Our RSM adopts full Ewald formulation to work for all configurations or when symmetry breaking occurs, such as for atomic displacements or elastic constant calculations. The structure-dependent coefficients AL that define RSM can be calculated once for a fixed structure and speed up the whole self-consistent-field procedure. The accuracy and rapid convergence properties are confirmed using two analytic models, including the Coulomb potential and energy. 
We then implement the full-potential RSM using the Green's function Korringa-Kohn-Rostoker (KKR) method for real applications and compare the results with other first-principle methods and experimental data, showing that they are equally as accurate.}, - keywords = {\_tablet,/unread,DFT,KKR,Poisson equation}, + keywords = {/unread,\_tablet,DFT,KKR,Poisson equation}, file = {/Users/wasmer/Nextcloud/Zotero/Ning et al_2022_Full-potential KKR within the removed-sphere method.pdf} } +@book{noltingQuantumTheoryMagnetism2009, + title = {Quantum {{Theory}} of {{Magnetism}}}, + author = {Nolting, Wolfgang and Ramakanth, Anupuru}, + date = {2009}, + edition = {1}, + publisher = {Springer Berlin Heidelberg}, + url = {https://link.springer.com/book/10.1007/978-3-540-85416-6}, + urldate = {2022-06-18}, + isbn = {978-3-540-85416-6}, + langid = {english}, + keywords = {\_tablet,condensed matter,graduate,magnetism,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Quantum Theory of Magnetism.pdf;/Users/wasmer/Zotero/storage/ULV44ULF/978-3-540-85416-6.html} +} + @article{novikovMagneticMomentTensor2022, title = {Magnetic {{Moment Tensor Potentials}} for Collinear Spin-Polarized Materials Reproduce Different Magnetic States of Bcc {{Fe}}}, author = {Novikov, Ivan and Grabowski, Blazej and Körmann, Fritz and Shapeev, Alexander}, @@ -11084,7 +12317,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--6}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00696-9}, url = {https://www.nature.com/articles/s41524-022-00696-9}, @@ -11105,7 +12338,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--6}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00696-9}, url = {https://www.nature.com/articles/s41524-022-00696-9}, @@ -11117,6 +12350,24 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Novikov et al_2022_Magnetic Moment Tensor Potentials for collinear spin-polarized materials.pdf} } +@article{nyariTopologicalSuperconductivityFirst2023, + title = {Topological Superconductivity from First Principles. {{I}}. {{Shiba}} Band Structure and Topological Edge States of Artificial Spin Chains}, + author = {Nyári, Bendegúz and Lászlóffy, András and Csire, Gábor and Szunyogh, László and Újfalussy, Balázs}, + date = {2023-10-23}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {108}, + number = {13}, + pages = {134512}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.108.134512}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.108.134512}, + urldate = {2024-01-01}, + abstract = {Magnetic chains on superconductors hosting Majorana zero modes (MZMs) have attracted a great deal of interest due to their possible applications in fault-tolerant quantum computing. However, this is hindered by the lack of a detailed, quantitative understanding of these systems. As a significant step forward, we present a first-principles computational approach based on a microscopic relativistic theory of inhomogeneous superconductors applied to an iron chain on the top of Au-covered Nb(110) to study the Shiba band structure and the topological nature of the edge states. 
Contrary to contemporary considerations, our method enables the introduction of quantities indicating band inversion without fitting parameters in realistic experimental settings, holding thus the power to determine the topological nature of zero-energy edge states in an accurate ab initio based description of the experimental systems. We confirm that ferromagnetic Fe chains on an Au/Nb(110) surface do not support any separated MZM; however, a broad range of spin-spirals can be identified with robust zero-energy edge states displaying signatures of MZMs. For these spirals, we explore the structure of the superconducting order parameter, shedding light on the internally antisymmetric triplet pairing hosted by MZMs. We also reveal a twofold effect of spin-orbit coupling: although it tends to enlarge the topological phase regarding spin spiraling angles, it also extends the localization of MZMs. Due to the presented predictive power, our work fills a big gap between experimental efforts and theoretical models while paving the way for engineering platforms for topological quantum computation.}, + keywords = {/unread,Budapest KKR group,GF2023 workshop,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Nyári et al_2023_Topological superconductivity from first principles.pdf;/Users/wasmer/Zotero/storage/JGL92RA5/PhysRevB.108.html} +} + @online{nyariTopologicalSuperconductivityFirstprinciples2023, title = {Topological Superconductivity from First-Principles {{I}}: {{Shiba}} Band Structure and Topological Edge States of Artificial Spin Chains}, shorttitle = {Topological Superconductivity from First-Principles {{I}}}, @@ -11159,7 +12410,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {340}, number = {6129}, pages = {153--154}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/science.1237215}, url = {https://www.science.org/doi/10.1126/science.1237215}, urldate = {2022-05-13}, @@ -11176,7 +12427,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {153}, number = {2}, pages = {024117}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0012901}, url = {https://aip.scitation.org/doi/10.1063/5.0012901}, @@ -11194,7 +12445,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {28}, number = {20}, pages = {7324--7331}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.6b02724}, url = {https://doi.org/10.1021/acs.chemmater.6b02724}, @@ -11212,7 +12463,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {153}, number = {14}, pages = {144106}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0016005}, url = {https://aip.scitation.org/doi/10.1063/5.0016005}, @@ -11222,9 +12473,10 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Onat et al_2020_Sensitivity and dimensionality of atomic environment representations used for.pdf;/Users/wasmer/Zotero/storage/RQ8UAKFX/5.html} } -@online{OnlineCourseMachine, +@online{OnlineCourseMachine2021, title = {Online {{Course}}: {{Machine Learning}} for {{Physicists}} 2021 - {{HedgeDoc}}}, shorttitle = {Online {{Course}}}, + date = {2021}, url = {https://pad.gwdg.de/s/Machine_Learning_For_Physicists_2021#}, urldate = {2021-05-13}, abstract = {\# Online 
Course: Machine Learning for Physicists 2021 :::info **Lecture Series by Florian Marquard}, @@ -11289,7 +12541,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {576}, number = {7787}, pages = {416--422}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-019-1840-9}, url = {https://www.nature.com/articles/s41586-019-1840-9}, @@ -11311,7 +12563,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {8}, pages = {083802}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.2.083802}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.2.083802}, urldate = {2021-05-19}, @@ -11330,7 +12582,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {8}, pages = {083802}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevMaterials.2.083802}, url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.2.083802}, urldate = {2023-05-06}, @@ -11348,7 +12600,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {6}, pages = {597--607}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/accountsmr.1c00244}, url = {https://doi.org/10.1021/accountsmr.1c00244}, urldate = {2022-07-11}, @@ -11366,7 +12618,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {3}, pages = {037001}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/abe663}, url = {https://doi.org/10.1088/2632-2153/abe663}, @@ -11407,7 +12659,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {1}, pages = {015018}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/abb212}, url = {https://doi.org/10.1088/2632-2153/abb212}, @@ -11427,7 +12679,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {1}, pages = {015018}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/abb212}, url = {https://dx.doi.org/10.1088/2632-2153/abb212}, @@ -11447,7 +12699,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {156}, number = {3}, pages = {034302}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0070488}, url = {https://aip.scitation.org/doi/full/10.1063/5.0070488}, @@ -11466,7 +12718,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {156}, number = {3}, pages = {034302}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0070488}, url = {https://aip.scitation.org/doi/full/10.1063/5.0070488}, @@ -11485,7 +12737,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {33}, number = {8}, pages = {084005}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0953-8984}, doi = {10.1088/1361-648X/abcb10}, url = {https://dx.doi.org/10.1088/1361-648X/abcb10}, @@ -11520,7 +12772,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {2}, pages = {025007}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac6a51}, url = {https://dx.doi.org/10.1088/2632-2153/ac6a51}, @@ -11546,6 +12798,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = 
{/Users/wasmer/Nextcloud/Zotero/Pasini et al_2023_Transferable prediction of formation energy across lattices of increasing size.pdf} } +@online{passaroReducingConvolutionsEfficient2023, + title = {Reducing {{SO}}(3) {{Convolutions}} to {{SO}}(2) for {{Efficient Equivariant GNNs}}}, + author = {Passaro, Saro and Zitnick, C. Lawrence}, + date = {2023-06-14}, + eprint = {2302.03655}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2302.03655}, + url = {http://arxiv.org/abs/2302.03655}, + urldate = {2024-05-07}, + abstract = {Graph neural networks that model 3D data, such as point clouds or atoms, are typically desired to be \$SO(3)\$ equivariant, i.e., equivariant to 3D rotations. Unfortunately equivariant convolutions, which are a fundamental operation for equivariant networks, increase significantly in computational complexity as higher-order tensors are used. In this paper, we address this issue by reducing the \$SO(3)\$ convolutions or tensor products to mathematically equivalent convolutions in \$SO(2)\$ . This is accomplished by aligning the node embeddings' primary axis with the edge vectors, which sparsifies the tensor product and reduces the computational complexity from \$O(L\textasciicircum 6)\$ to \$O(L\textasciicircum 3)\$, where \$L\$ is the degree of the representation. We demonstrate the potential implications of this improvement by proposing the Equivariant Spherical Channel Network (eSCN), a graph neural network utilizing our novel approach to equivariant convolutions, which achieves state-of-the-art results on the large-scale OC-20 and OC-22 datasets.}, + pubstate = {preprint}, + keywords = {/unread,alternative approaches,alternative for equivariance,AML,computational complexity,convolution,equivariant,equivariant alternative,eSCN,GNN,Meta Research,ML,MLP,MPNN,Open Catalyst,rotational symmetry,SO(3),tensor product,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Passaro_Zitnick_2023_Reducing SO(3) Convolutions to SO(2) for Efficient Equivariant GNNs2.pdf;/Users/wasmer/Zotero/storage/IIL5PCZ5/2302.html} +} + @online{PasteurLabsISI2023, title = {Pasteur {{Labs}} \& {{ISI}} - {{Research}}}, date = {2023-08-21}, @@ -11566,7 +12834,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {7}, pages = {503--521}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2058-8437}, doi = {10.1038/s41578-022-00433-0}, url = {https://www.nature.com/articles/s41578-022-00433-0}, @@ -11599,7 +12867,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {105}, number = {19}, pages = {195141}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.105.195141}, url = {https://link.aps.org/doi/10.1103/PhysRevB.105.195141}, urldate = {2023-04-14}, @@ -11634,7 +12902,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, url = {http://arxiv.org/abs/2205.01591}, urldate = {2022-05-13}, abstract = {Over the past decade machine learning has made significant advances in approximating density functionals, but whether this signals the end of human-designed functionals remains to be seen. 
Ryan Pederson, Bhupalee Kalita and Kieron Burke discuss the rise of machine learning for functional design.}, - keywords = {DeepMind,density functional,DFT,DM21,ML,ML-DFT,ML-ESM}, + keywords = {DeepMind,density functional,DFT,DM21,ML,ML-DFA,ML-DFT,ML-ESM,perspective}, file = {/Users/wasmer/Nextcloud/Zotero/Pederson et al_2022_Machine learning and density functional theory.pdf;/Users/wasmer/Zotero/storage/UPT9RJEW/2205.html} } @@ -11662,7 +12930,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {1}, pages = {1--6}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-4648}, doi = {10.1038/s41535-020-00288-0}, url = {https://www.nature.com/articles/s41535-020-00288-0}, @@ -11698,7 +12966,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023-03-30}, journaltitle = {ACS Physical Chemistry Au}, shortjournal = {ACS Phys. Chem Au}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acsphyschemau.2c00069}, url = {https://doi.org/10.1021/acsphyschemau.2c00069}, urldate = {2023-06-30}, @@ -11726,6 +12994,23 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Pereira et al_2021_Challenges of Topological Insulator Research.pdf;/Users/wasmer/Zotero/storage/CXAHTBAM/pssb.html} } +@online{persaudReproducibilityComputationalMaterials2023, + title = {Reproducibility in {{Computational Materials Science}}: {{Lessons}} from '{{A General-Purpose Machine Learning Framework}} for {{Predicting Properties}} of {{Inorganic Materials}}'}, + shorttitle = {Reproducibility in {{Computational Materials Science}}}, + author = {Persaud, Daniel and Ward, Logan and Hattrick-Simpers, Jason}, + date = {2023-10-10}, + eprint = {2310.07044}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2310.07044}, + url = {http://arxiv.org/abs/2310.07044}, + urldate = {2023-12-05}, + abstract = {The integration of machine learning techniques in materials discovery has become prominent in materials science research and has been accompanied by an increasing trend towards open-source data and tools to propel the field. Despite the increasing usefulness and capabilities of these tools, developers neglecting to follow reproducible practices creates a significant barrier for researchers looking to use or build upon their work. In this study, we investigate the challenges encountered while attempting to reproduce a section of the results presented in "A general-purpose machine learning framework for predicting properties of inorganic materials." Our analysis identifies four major categories of challenges: (1) reporting computational dependencies, (2) recording and sharing version logs, (3) sequential code organization, and (4) clarifying code references within the manuscript. 
The result is a proposed set of tangible action items for those aiming to make code accessible to, and useful for the community.}, + pubstate = {preprint}, + keywords = {AML,materials informatics,metadata,ML,RDM,reproducibility,scientific workflows,todo-tagging,version control,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Persaud et al_2023_Reproducibility in Computational Materials Science.pdf;/Users/wasmer/Zotero/storage/XPWR5SBW/2310.html} +} + @inproceedings{pezoaFoundationsJSONSchema2016, title = {Foundations of {{JSON Schema}}}, booktitle = {Proceedings of the 25th {{International Conference}} on {{World Wide Web}}}, @@ -11733,8 +13018,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2016-04-11}, series = {{{WWW}} '16}, pages = {263--273}, - publisher = {{International World Wide Web Conferences Steering Committee}}, - location = {{Republic and Canton of Geneva, CHE}}, + publisher = {International World Wide Web Conferences Steering Committee}, + location = {Republic and Canton of Geneva, CHE}, doi = {10.1145/2872427.2883029}, url = {https://doi.org/10.1145/2872427.2883029}, urldate = {2021-10-17}, @@ -11765,7 +13050,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-06-26}, abstract = {We develop a protocol to determine the presence and extent of a topological phase with Majorana zero modes in a hybrid superconductor-semiconductor device. The protocol is based on conductance measurements in a three-terminal device with two normal leads and one superconducting lead. A radio-frequency technique acts as a proxy for the measurement of local conductance, allowing a rapid, systematic scan of the large experimental phase space of the device. Majorana zero modes cause zero bias conductance peaks at each end of the wire, so we identify promising regions of the phase space by filtering for this condition. To validate the presence of a topological phase, a subsequent measurement of the non-local conductance in these regions is used to detect a topological transition via the closing and reopening of the bulk energy gap. We define data analysis routines that allow for an automated and unbiased execution of the protocol. Our protocol is designed to screen out false positives, especially trivial Andreev bound states that mimic Majorana zero modes in local conductance. 
We apply the protocol to several examples of simulated data illustrating the detection of topological phases and the screening of false positives.}, langid = {english}, - organization = {{arXiv.org}}, + organization = {arXiv.org}, keywords = {/unread,experimental science,Majorana,MZM,physics,topological insulator,transport properties}, file = {/Users/wasmer/Nextcloud/Zotero/Pikulin et al_2021_Protocol to identify a topological superconducting phase in a three-terminal.pdf} } @@ -11779,7 +13064,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {1}, number = {1}, pages = {1--188}, - publisher = {{Morgan \& Claypool Publishers}}, + publisher = {Morgan \& Claypool Publishers}, issn = {2691-1930}, doi = {10.2200/S00981ED1V01Y202001MOP001}, url = {https://www.morganclaypool.com/doi/10.2200/S00981ED1V01Y202001MOP001}, @@ -11813,7 +13098,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {43}, pages = {14396--14413}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2041-6539}, doi = {10.1039/D1SC03564A}, url = {https://pubs.rsc.org/en/content/articlelanding/2021/sc/d1sc03564a}, @@ -11849,7 +13134,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023-10-03}, journaltitle = {Chemical Science}, shortjournal = {Chem. Sci.}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2041-6539}, doi = {10.1039/D3SC02581K}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/sc/d3sc02581k}, @@ -11887,7 +13172,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {4}, pages = {040501}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac4d11}, url = {https://dx.doi.org/10.1088/2632-2153/ac4d11}, @@ -11966,7 +13251,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {107}, number = {12}, pages = {125160}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.125160}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.125160}, urldate = {2023-04-02}, @@ -11984,7 +13269,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {125}, number = {16}, pages = {166001}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.125.166001}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.125.166001}, urldate = {2021-05-13}, @@ -12022,6 +13307,20 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Pozdnyakov_Ceriotti_2023_Smooth, exact rotational symmetrization for deep learning on point clouds.pdf;/Users/wasmer/Zotero/storage/W32HXDSQ/2305.html} } +@book{princeUnderstandingDeepLearning2023, + title = {Understanding Deep Learning}, + author = {Prince, Simon J. 
D.}, + date = {2023}, + publisher = {The MIT Press}, + location = {Cambridge, Massachusetts}, + url = {http://udlbook.com}, + abstract = {"This book covers modern deep learning and tackles supervised learning, model architecture, unsupervised learning, and deep reinforcement learning"--}, + isbn = {978-0-262-37709-6 978-0-262-37710-2}, + pagetotal = {1}, + keywords = {\_tablet,Deep learning,educational,GNN,graph ML,learning material,ML theory,online book,online course,textbook,transformer,tutorial,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Prince_2023_Understanding deep learning.pdf} +} + @online{probstGrowingPainsReacting2022, title = {Growing Pains: {{Reacting}} to Negative Impacts of Deep Learning on Machine Learning for Chemistry}, shorttitle = {Growing Pains}, @@ -12046,7 +13345,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {102}, number = {33}, pages = {11635--11638}, - publisher = {{Proceedings of the National Academy of Sciences}}, + publisher = {Proceedings of the National Academy of Sciences}, doi = {10.1073/pnas.0505436102}, url = {https://www.pnas.org/doi/full/10.1073/pnas.0505436102}, urldate = {2022-10-05}, @@ -12066,17 +13365,27 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-01-20}, abstract = {We present an atomic cluster expansion (ACE) for carbon that improves over available classical and machine learning potentials. The ACE is parameterized from an exhaustive set of important carbon structures at extended volume and energy range, computed using density functional theory (DFT). Rigorous validation reveals that ACE predicts accurately a broad range of properties of both crystalline and amorphous carbon phases while being several orders of magnitude more computationally efficient than available machine learning models. We demonstrate the predictive power of ACE on three distinct applications, brittle crack propagation in diamond, evolution of amorphous carbon structures at different densities and quench rates and nucleation and growth of fullerene clusters under high pressure and temperature conditions.}, pubstate = {preprint}, - keywords = {ACE,carbon,descriptors,MLP,molecular dynamics}, + keywords = {ACE,AML,carbon,DeePMD-kit,descriptors,DFT,GAP,ML,MLP,molecular dynamics}, file = {/Users/wasmer/Nextcloud/Zotero/Qamar et al_2022_Atomic cluster expansion for quantum-accurate large-scale simulations of carbon.pdf;/Users/wasmer/Zotero/storage/SCVIRYIV/2210.html} } -@book{QuantumTheoryMagnetism, - title = {Quantum {{Theory}} of {{Magnetism}}}, - url = {https://link.springer.com/book/10.1007/978-3-540-85416-6}, - urldate = {2022-06-18}, - langid = {english}, - keywords = {\_tablet,condensed matter,graduate,magnetism,textbook}, - file = {/Users/wasmer/Nextcloud/Zotero/Quantum Theory of Magnetism.pdf;/Users/wasmer/Zotero/storage/ULV44ULF/978-3-540-85416-6.html} +@article{qamarAtomicClusterExpansion2023, + title = {Atomic {{Cluster Expansion}} for {{Quantum-Accurate Large-Scale Simulations}} of {{Carbon}}}, + author = {Qamar, Minaam and Mrovec, Matous and Lysogorskiy, Yury and Bochkarev, Anton and Drautz, Ralf}, + date = {2023-08-08}, + journaltitle = {Journal of Chemical Theory and Computation}, + shortjournal = {J. Chem. 
Theory Comput.}, + volume = {19}, + number = {15}, + pages = {5151--5167}, + publisher = {American Chemical Society}, + issn = {1549-9618}, + doi = {10.1021/acs.jctc.2c01149}, + url = {https://doi.org/10.1021/acs.jctc.2c01149}, + urldate = {2023-12-18}, + abstract = {We present an atomic cluster expansion (ACE) for carbon that improves over available classical and machine learning potentials. The ACE is parametrized from an exhaustive set of important carbon structures over extended volume and energy ranges, computed using density functional theory (DFT). Rigorous validation reveals that ACE accurately predicts a broad range of properties of both crystalline and amorphous carbon phases while being several orders of magnitude more computationally efficient than available machine learning models. We demonstrate the predictive power of ACE on three distinct applications: brittle crack propagation in diamond, the evolution of amorphous carbon structures at different densities and quench rates, and the nucleation and growth of fullerene clusters under high-pressure and high-temperature conditions.}, + keywords = {ACE,AML,carbon,DeePMD-kit,descriptors,DFT,GAP,ML,MLP,molecular dynamics}, + file = {/Users/wasmer/Nextcloud/Zotero/Qamar et al_2023_Atomic Cluster Expansion for Quantum-Accurate Large-Scale Simulations of Carbon.pdf} } @book{quMachineLearningMolecular2023, @@ -12085,8 +13394,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023}, series = {Challenges and {{Advances}} in {{Computational Chemistry}} and {{Physics}}}, volume = {36}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-031-37196-7}, url = {https://link.springer.com/10.1007/978-3-031-37196-7}, urldate = {2023-10-06}, @@ -12240,7 +13549,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {11}, number = {5}, pages = {2087--2096}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.5b00099}, url = {https://doi.org/10.1021/acs.jctc.5b00099}, @@ -12258,7 +13567,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, eprint = {2109.07573}, eprinttype = {arxiv}, eprintclass = {physics}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.2109.07573}, url = {http://arxiv.org/abs/2109.07573}, urldate = {2022-05-18}, @@ -12310,7 +13619,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {126}, number = {4}, pages = {529--535}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1089-5639}, doi = {10.1021/acs.jpca.1c08950}, url = {https://doi.org/10.1021/acs.jpca.1c08950}, @@ -12356,11 +13665,31 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-07-01}, abstract = {The Atomic Cluster Expansion (ACE) provides a formally complete basis for the local atomic environment. ACE is not limited to representing energies as a function of atomic positions and chemical species, but can be generalized to vectorial or tensorial properties and to incorporate further degrees of freedom (DOF). This is crucial for magnetic materials with potential energy surfaces that depend on atomic positions and atomic magnetic moments simultaneously. In this work, we employ the ACE formalism to develop a non-collinear magnetic ACE parametrization for the prototypical magnetic element Fe. 
The model is trained on a broad range of collinear and non-collinear magnetic structures calculated using spin density functional theory. We demonstrate that the non-collinear magnetic ACE is able to reproduce not only ground state properties of various magnetic phases of Fe but also the magnetic and lattice excitations that are essential for a correct description of the finite temperature behavior and properties of crystal defects.}, langid = {english}, - organization = {{arXiv.org}}, - keywords = {todo-tagging}, + pubstate = {preprint}, + keywords = {\_tablet,ACE,AML,Dzyaloshinskii–Moriya interaction,Heisenberg model,higher-order exchange interactions,Jij,linear regression,magnetism,ML,MLP,non-collinear,pacemaker,prediction of Jij,spin-dependent,tensorial target,todo-tagging,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Rinaldi et al_2023_Non-collinear Magnetic Atomic Cluster Expansion for Iron.pdf} } +@article{rinaldiNoncollinearMagneticAtomic2024, + title = {Non-Collinear Magnetic Atomic Cluster Expansion for Iron}, + author = {Rinaldi, Matteo and Mrovec, Matous and Bochkarev, Anton and Lysogorskiy, Yury and Drautz, Ralf}, + date = {2024-01-11}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {10}, + number = {1}, + pages = {1--12}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-024-01196-8}, + url = {https://www.nature.com/articles/s41524-024-01196-8}, + urldate = {2024-04-24}, + abstract = {The Atomic Cluster Expansion (ACE) provides a formally complete basis for the local atomic environment. ACE is not limited to representing energies as a function of atomic positions and chemical species, but can be generalized to vectorial or tensorial properties and to incorporate further degrees of freedom (DOF). This is crucial for magnetic materials with potential energy surfaces that depend on atomic positions and atomic magnetic moments simultaneously. In this work, we employ the ACE formalism to develop a non-collinear magnetic ACE parametrization for the prototypical magnetic element Fe. The model is trained on a broad range of collinear and non-collinear magnetic structures calculated using spin density functional theory. 
We demonstrate that the non-collinear magnetic ACE is able to reproduce not only ground state properties of various magnetic phases of Fe but also the magnetic and lattice excitations that are essential for a correct description of finite temperature behavior and properties of crystal defects.}, + langid = {english}, + keywords = {ACE,AML,Dzyaloshinskii–Moriya interaction,equivariant,Heisenberg model,higher-order exchange interactions,Jij,KKR,linear regression,magnetism,magnon dispersion,magnons,ML,MLP,non-collinear,pacemaker,prediction of Jij,spin spiral,spin-dependent,SPRKKR,tensorial target,todo-tagging,TRS,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Rinaldi et al_2024_Non-collinear magnetic atomic cluster expansion for iron.pdf} +} + @article{RiseQuantumMaterials2016, title = {The Rise of Quantum Materials}, date = {2016-02}, @@ -12369,7 +13698,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {2}, pages = {105--105}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/nphys3668}, url = {https://www.nature.com/articles/nphys3668}, @@ -12378,7 +13707,11 @@ Junqi Yin (Oak Ridge National Laboratory)}, issue = {2}, langid = {english}, keywords = {quantum materials,review}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Editorial Subject\_term: Condensed-matter physics;History;Quantum physics Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Editorial\\ +Subject\_term: Condensed-matter physics;History;Quantum physics\\ +Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, file = {/Users/wasmer/Nextcloud/Zotero/2016_The rise of quantum materials.pdf;/Users/wasmer/Zotero/storage/YG3UAYEY/nphys3668.html} } @@ -12408,7 +13741,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {4}, number = {4}, pages = {335--339}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/nmat1349}, url = {https://www.nature.com/articles/nmat1349}, @@ -12429,7 +13762,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {4}, pages = {045016}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac9a9d}, url = {https://dx.doi.org/10.1088/2632-2153/ac9a9d}, @@ -12449,7 +13782,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {157}, number = {17}, pages = {174115}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0121748}, url = {https://aip.scitation.org/doi/full/10.1063/5.0121748}, @@ -12475,6 +13808,25 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Zotero/storage/JDTKMCIM/Rossignol et al. - 2023 - Machine-Learning-Assisted Construction of Ternary .pdf;/Users/wasmer/Zotero/storage/2Z9WD4PH/2308.html} } +@article{rossignolMachineLearningAssistedConstructionTernary2024, + title = {Machine-{{Learning-Assisted Construction}} of {{Ternary Convex Hull Diagrams}}}, + author = {Rossignol, Hugo and Minotakis, Michail and Cobelli, Matteo and Sanvito, Stefano}, + date = {2024-03-25}, + journaltitle = {Journal of Chemical Information and Modeling}, + shortjournal = {J. Chem. Inf. 
Model.}, + volume = {64}, + number = {6}, + pages = {1828--1840}, + publisher = {American Chemical Society}, + issn = {1549-9596}, + doi = {10.1021/acs.jcim.3c01391}, + url = {https://doi.org/10.1021/acs.jcim.3c01391}, + urldate = {2024-03-31}, + abstract = {In the search for novel intermetallic ternary alloys, much of the effort goes into performing a large number of ab initio calculations covering a wide range of compositions and structures. These are essential to building a reliable convex hull diagram. While density functional theory (DFT) provides accurate predictions for many systems, its computational overheads set a throughput limit on the number of hypothetical phases that can be probed. Here, we demonstrate how an ensemble of machine-learning (ML) spectral neighbor-analysis potentials (SNAPs) can be integrated into a workflow for the construction of accurate ternary convex hull diagrams, highlighting regions that are fertile for materials discovery. Our workflow relies on using available binary-alloy data both to train the SNAP models and to create prototypes for ternary phases. From the prototype structures, all unique ternary decorations are created and used to form a pool of candidate compounds. The SNAPs ensemble is then used to prerelax the structures and screen the most favorable prototypes before using DFT to build the final phase diagram. As constructed, the proposed workflow relies on no extra first-principles data to train the ML surrogate model and yields a DFT-level accurate convex hull. We demonstrate its efficacy by investigating the Cu–Ag–Au and Mo–Ta–W ternary systems.}, + keywords = {/unread,AFLOWLIB,alloys,AML,ase,bispectrum,convex hull,ensemble learning,LAMMPS,ML,ML-DFT,ML-ESM,MLP,phase diagram,pymatgen,scikit-learn,SNAP,structure relaxation,surrogate model,ternary systems,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Rossignol et al_2024_Machine-Learning-Assisted Construction of Ternary Convex Hull Diagrams.pdf} +} + @article{rossLargescaleChemicalLanguage2022, title = {Large-Scale Chemical Language Representations Capture Molecular Structure and Properties}, author = {Ross, Jerret and Belgodere, Brian and Chenthamarakshan, Vijil and Padhi, Inkit and Mroueh, Youssef and Das, Payel}, @@ -12484,7 +13836,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {4}, number = {12}, pages = {1256--1264}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5839}, doi = {10.1038/s42256-022-00580-7}, url = {https://www.nature.com/articles/s42256-022-00580-7}, @@ -12496,6 +13848,23 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Ross et al_2022_Large-scale chemical language representations capture molecular structure and.pdf} } +@online{rubungoLLMPropPredictingPhysical2023, + title = {{{LLM-Prop}}: {{Predicting Physical And Electronic Properties Of Crystalline Solids From Their Text Descriptions}}}, + shorttitle = {{{LLM-Prop}}}, + author = {Rubungo, Andre Niyongabo and Arnold, Craig and Rand, Barry P. and Dieng, Adji Bousso}, + date = {2023-10-21}, + eprint = {2310.14029}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2310.14029}, + url = {http://arxiv.org/abs/2310.14029}, + urldate = {2023-12-05}, + abstract = {The prediction of crystal properties plays a crucial role in the crystal design process. Current methods for predicting crystal properties focus on modeling crystal structures using graph neural networks (GNNs). 
Although GNNs are powerful, accurately modeling the complex interactions between atoms and molecules within a crystal remains a challenge. Surprisingly, predicting crystal properties from crystal text descriptions is understudied, despite the rich information and expressiveness that text data offer. One of the main reasons is the lack of publicly available data for this task. In this paper, we develop and make public a benchmark dataset (called TextEdge) that contains text descriptions of crystal structures with their properties. We then propose LLM-Prop, a method that leverages the general-purpose learning capabilities of large language models (LLMs) to predict the physical and electronic properties of crystals from their text descriptions. LLM-Prop outperforms the current state-of-the-art GNN-based crystal property predictor by about 4\% in predicting band gap, 3\% in classifying whether the band gap is direct or indirect, and 66\% in predicting unit cell volume. LLM-Prop also outperforms a finetuned MatBERT, a domain-specific pre-trained BERT model, despite having 3 times fewer parameters. Our empirical results may highlight the current inability of GNNs to capture information pertaining to space group symmetry and Wyckoff sites for accurate crystal property prediction.}, + pubstate = {preprint}, + keywords = {/unread,AML,language models,library,LLM,materials,ML,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Rubungo et al_2023_LLM-Prop.pdf;/Users/wasmer/Zotero/storage/BLZZVYKN/2310.html} +} + @online{ruheGeometricCliffordAlgebra2023, title = {Geometric {{Clifford Algebra Networks}}}, author = {Ruhe, David and Gupta, Jayesh K. and family=Keninck, given=Steven, prefix=de, useprefix=true and Welling, Max and Brandstetter, Johannes}, @@ -12521,7 +13890,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {52}, number = {12}, pages = {997--1000}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.52.997}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.52.997}, urldate = {2023-09-21}, @@ -12539,7 +13908,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {108}, number = {5}, pages = {058301}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.108.058301}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.108.058301}, urldate = {2021-07-10}, @@ -12589,7 +13958,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-020-00482-5}, url = {https://www.nature.com/articles/s41524-020-00482-5}, @@ -12638,7 +14007,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2023-08-09}, url = {https://www.fz-juelich.de/en/pgi/pgi-1/expertise/spintronics-and-quantum-transformation-spin-qx-2023}, eventtitle = {Spintronics and {{Quantum Transformation}} ({{Spin-QX}} 2023)}, - venue = {{Forschungszentrum Jülich}}, + venue = {Forschungszentrum Jülich}, keywords = {BdG,CPA,defects,DFT,FZJ,impurity embedding,JuKKR,KKR,KS-BdG,MZM,PGI,PGI-1/IAS-1,physics,quantum materials,SOC,spintronics,superconducting spitronics,superconductor,Topological Superconductor,Yu-Shiba-Rusinov} } @@ -12688,7 +14057,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-08-19}, abstract = {Interfacing a topological insulator (TI) with an \$s\$-wave superconductor (SC) is a promising material platform that 
offers the possibility to realize a topological superconductor through which Majorana-based topologically protected qubits can be engineered. In our computational study of the prototypical SC/TI interface between Nb and Bi\$\_2\$Te\$\_3\$, we identify the benefits and possible bottlenecks of this potential Majorana material platform. Bringing Nb in contact with the TI film induces charge doping from the SC to the TI, which shifts the Fermi level into the TI conduction band. For thick TI films, this results in band bending leading to the population of trivial TI quantum-well states at the interface. In the superconducting state, we uncover that the topological surface state experiences a sizable superconducting gap-opening at the SC/TI interface, which is furthermore robust against fluctuations of the Fermi energy. We also show that the trivial interface state is only marginally proximitized, potentially obstructing the realization of Majorana-based qubits in this material platform.}, pubstate = {preprint}, - keywords = {/unread,PGI-1/IAS-1,todo-tagging}, + keywords = {PGI-1/IAS-1,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann_Blügel_2022_Proximity induced superconductivity in a topological insulator.pdf;/Users/wasmer/Zotero/storage/5Q45YH6R/2208.html} } @@ -12697,7 +14066,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Rüßmann, Philipp}, date = {2018}, number = {FZJ-2018-04348}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/19428}, urldate = {2022-08-12}, abstract = {This thesis provides a detailed microscopic understanding of the impurity scattering of topologically protected electrons, which are studied within the example of strong three-dimensional topological insulators (TIs) and type-II Weyl semimetals. The immense research interest in the recent past in topological materials is to a large extent due to the fact that their unconventional electronic surface states are robust against perturbations, such as surface structural relaxations or defects. One of the most intriguing physical properties of topological surface states in TIs is the forbidden backscattering off time-reversal invariant defects, which makes TI materials very promising candidates for future low-power electronics or quantum information technology. [...] 
Rüßmann, Philipp}, @@ -12785,7 +14154,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {139}, number = {18}, pages = {184118}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.4828704}, url = {https://aip.scitation.org/doi/10.1063/1.4828704}, @@ -12820,7 +14189,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {11}, number = {1}, pages = {892}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-14660-y}, url = {https://www.nature.com/articles/s41467-020-14660-y}, @@ -12875,7 +14244,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {40}, number = {6}, pages = {3336--3355}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {1460-4744}, doi = {10.1039/C1CS15047B}, url = {https://pubs.rsc.org/en/content/articlelanding/2011/cs/c1cs15047b}, @@ -12886,13 +14255,29 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Sanvito_2011_Molecular spintronics.pdf} } +@online{sasseLeakageMachineLearning2023, + title = {On {{Leakage}} in {{Machine Learning Pipelines}}}, + author = {Sasse, Leonard and Nicolaisen-Sobesky, Eliana and Dukart, Juergen and Eickhoff, Simon B. and Götz, Michael and Hamdan, Sami and Komeyer, Vera and Kulkarni, Abhijit and Lahnakoski, Juha and Love, Bradley C. and Raimondo, Federico and Patil, Kaustubh R.}, + date = {2023-11-07}, + eprint = {2311.04179}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2311.04179}, + url = {http://arxiv.org/abs/2311.04179}, + urldate = {2024-01-14}, + abstract = {Machine learning (ML) provides powerful tools for predictive modeling. ML's popularity stems from the promise of sample-level prediction with applications across a variety of fields from physics and marketing to healthcare. However, if not properly implemented and evaluated, ML pipelines may contain leakage typically resulting in overoptimistic performance estimates and failure to generalize to new data. This can have severe negative financial and societal implications. Our aim is to expand understanding associated with causes leading to leakage when designing, implementing, and evaluating ML pipelines. 
Illustrated by concrete examples, we provide a comprehensive overview and discussion of various types of leakage that may arise in ML pipelines.}, + pubstate = {preprint}, + keywords = {/unread,best practices,data leakage,FZJ,General ML,ML,MLOps,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Sasse et al_2023_On Leakage in Machine Learning Pipelines.pdf;/Users/wasmer/Zotero/storage/VLTE5EB5/2311.html} +} + @inproceedings{satorrasEquivariantGraphNeural2021, title = {E(n) {{Equivariant Graph Neural Networks}}}, booktitle = {Proceedings of the 38th {{International Conference}} on {{Machine Learning}}}, author = {Satorras, Víctor Garcia and Hoogeboom, Emiel and Welling, Max}, date = {2021-07-01}, pages = {9323--9332}, - publisher = {{PMLR}}, + publisher = {PMLR}, issn = {2640-3498}, url = {https://proceedings.mlr.press/v139/satorras21a.html}, urldate = {2022-03-29}, @@ -12944,7 +14329,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {3733}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-31093-x}, url = {https://www.nature.com/articles/s41467-022-31093-x}, @@ -13013,7 +14398,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {8}, number = {1}, pages = {1--11}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-022-00791-x}, url = {https://www.nature.com/articles/s41524-022-00791-x}, @@ -13064,7 +14449,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {2}, number = {5}, pages = {331--341}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2662-8457}, doi = {10.1038/s43588-022-00228-x}, url = {https://www.nature.com/articles/s43588-022-00228-x}, @@ -13084,7 +14469,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {12}, number = {8}, pages = {1070}, - publisher = {{Multidisciplinary Digital Publishing Institute}}, + publisher = {Multidisciplinary Digital Publishing Institute}, issn = {2073-4352}, doi = {10.3390/cryst12081070}, url = {https://www.mdpi.com/2073-4352/12/8/1070}, @@ -13096,6 +14481,24 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Schlenz_Sandfeld_2022_Applications of Machine Learning to the Study of Crystalline Materials.pdf} } +@article{schmidhuberDeepLearningNeural2015, + title = {Deep learning in neural networks: {{An}} overview}, + shorttitle = {Deep learning in neural networks}, + author = {Schmidhuber, Jürgen}, + date = {2015-01-01}, + journaltitle = {Neural Networks}, + shortjournal = {Neural Networks}, + volume = {61}, + pages = {85--117}, + issn = {0893-6080}, + doi = {10.1016/j.neunet.2014.09.003}, + url = {https://www.sciencedirect.com/science/article/pii/S0893608014002135}, + urldate = {2023-12-21}, + abstract = {In recent years, deep artificial neural networks (including recurrent ones) have won numerous contests in pattern recognition and machine learning. This historical survey compactly summarizes relevant work, much of it from the previous millennium. Shallow and Deep Learners are distinguished by the depth of their credit assignment paths, which are chains of possibly learnable, causal links between actions and effects. 
I review deep supervised learning (also recapitulating the history of backpropagation), unsupervised learning, reinforcement learning \& evolutionary computation, and indirect search for short programs encoding deep and large networks.}, + keywords = {best paper award,General ML,history of AI,history of science,ML,reinforcement-learning,review,review-of-DL,Supervised learning,unsupervised learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Schmidhuber_2015_Deep learning in neural networks.pdf;/Users/wasmer/Zotero/storage/FHRQM782/S0893608014002135.html} +} + @article{schmidtCrystalGraphAttention2021, title = {Crystal Graph Attention Networks for the Prediction of Stable Materials}, author = {Schmidt, Jonathan and Pettersson, Love and Verdozzi, Claudio and Botti, Silvana and Marques, Miguel A. L.}, @@ -13104,7 +14507,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {49}, pages = {eabi7948}, - publisher = {{American Association for the Advancement of Science}}, + publisher = {American Association for the Advancement of Science}, doi = {10.1126/sciadv.abi7948}, url = {https://www.science.org/doi/10.1126/sciadv.abi7948}, urldate = {2023-04-04}, @@ -13156,7 +14559,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {10}, number = {20}, pages = {6425--6431}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.9b02422}, url = {https://doi.org/10.1021/acs.jpclett.9b02422}, urldate = {2022-07-05}, @@ -13174,7 +14577,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {1}, pages = {1--36}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-019-0221-0}, url = {https://www.nature.com/articles/s41524-019-0221-0}, @@ -13182,7 +14585,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {One of the most exciting tools that have entered the material science toolbox in recent years is machine learning. This collection of statistical methods has already proved to be capable of considerably speeding up both fundamental and applied research. At present, we are witnessing an explosion of works that develop and apply machine learning to solid-state systems. We provide a comprehensive overview and analysis of the most recent research in this topic. As a starting point, we introduce machine learning principles, algorithms, descriptors, and databases in materials science. We continue with the description of different machine learning approaches for the discovery of stable materials and the prediction of their crystal structure. Then we discuss research in numerous quantitative structure–property relationships and various approaches for the replacement of first-principle methods by machine learning. We review how active learning and surrogate-based optimization can be applied to improve the rational design process and related examples of applications. Two major questions are always the interpretability of and the physical understanding gained from machine learning models. We consider therefore the different facets of interpretability and their importance in materials science. 
Finally, we propose solutions and future research paths for various challenges in computational materials science.}, issue = {1}, langid = {english}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Condensed-matter physics;Electronic structure;Materials science;Metals and alloys;Semiconductors Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-science;metals-and-alloys;semiconductors}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Condensed-matter physics;Electronic structure;Materials science;Metals and alloys;Semiconductors\\ +Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-science;metals-and-alloys;semiconductors}, file = {/Users/wasmer/Nextcloud/Zotero/Schmidt et al_2019_Recent advances and applications of machine learning in solid-state materials.pdf;/Users/wasmer/Zotero/storage/BY9RESIZ/s41524-019-0221-0.html} } @@ -13195,7 +14603,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {22}, number = {7}, pages = {2595--2602}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1530-6984}, doi = {10.1021/acs.nanolett.1c04055}, url = {https://doi.org/10.1021/acs.nanolett.1c04055}, @@ -13205,6 +14613,25 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Schmitt et al_2022_Integration of Topological Insulator Josephson Junctions in Superconducting.pdf;/Users/wasmer/Zotero/storage/ZVNVRDSF/acs.nanolett.html} } +@article{scholzWritingPublishingScientific2022, + title = {Writing and Publishing a Scientific Paper}, + author = {Scholz, Fritz}, + date = {2022-01-11}, + journaltitle = {ChemTexts}, + shortjournal = {ChemTexts}, + volume = {8}, + number = {1}, + pages = {8}, + issn = {2199-3793}, + doi = {10.1007/s40828-022-00160-7}, + url = {https://doi.org/10.1007/s40828-022-00160-7}, + urldate = {2023-12-07}, + abstract = {This text is designed to give the reader a helping hand in writing a scientific paper. It provides generic advice on ways that a scientific paper can be improved. The focus is on the following ethical and non-technical issues: (1) when to start writing, and in what language; (2) how to choose a good title; (3) what should be included in the various sections (abstract, introduction, experimental, results, discussion, conclusions, and supporting information (supplementary material); (4) who should be considered as a co-author, and who should be acknowledged for help; (5) which journal should be chosen; and (6) how to respond to reviewers’ comments. 
Purely technical issues, such as grammar, artwork, reference styles, etc., are not considered.}, + langid = {english}, + keywords = {/unread,\_tablet,advice,best practices,educational,publishing,scientific journals,scientific writing,working in science,writing}, + file = {/Users/wasmer/Nextcloud/Zotero/Scholz_2022_Writing and publishing a scientific paper.pdf} +} + @article{schuchComputationalComplexityInteracting2009, title = {Computational complexity of interacting electrons and fundamental limitations of density functional theory}, author = {Schuch, Norbert and Verstraete, Frank}, @@ -13214,7 +14641,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {5}, number = {10}, pages = {732--735}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/nphys1370}, url = {https://www.nature.com/articles/nphys1370}, @@ -13232,8 +14659,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2020}, series = {Lecture {{Notes}} in {{Physics}}}, volume = {968}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-030-40245-7}, url = {http://link.springer.com/10.1007/978-3-030-40245-7}, urldate = {2021-05-13}, @@ -13252,7 +14679,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {148}, number = {24}, pages = {241722}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5019779}, url = {https://aip.scitation.org/doi/full/10.1063/1.5019779}, @@ -13288,7 +14715,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {10}, number = {1}, pages = {5024}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-019-12875-2}, url = {https://www.nature.com/articles/s41467-019-12875-2}, @@ -13309,7 +14736,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {34}, number = {9}, pages = {11395--11407}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0887-0624}, doi = {10.1021/acs.energyfuels.0c01533}, url = {https://doi.org/10.1021/acs.energyfuels.0c01533}, @@ -13324,7 +14751,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Sculley, D. and Holt, Gary and Golovin, Daniel and Davydov, Eugene and Phillips, Todd and Ebner, Dietmar and Chaudhary, Vinay and Young, Michael and Crespo, Jean-François and Dennison, Dan}, date = {2015}, volume = {28}, - publisher = {{Curran Associates, Inc.}}, + publisher = {Curran Associates, Inc.}, url = {https://papers.nips.cc/paper_files/paper/2015/hash/86df7dcfd896fcaf2674f757a2463eba-Abstract.html}, urldate = {2023-09-01}, abstract = {Machine learning offers a fantastically powerful toolkit for building useful complex prediction systems quickly. This paper argues it is dangerous to think of these quick wins as coming for free. Using the software engineering framework of technical debt, we find it is common to incur massive ongoing maintenance costs in real-world ML systems. We explore several ML-specific risk factors to account for in system design. These include boundary erosion, entanglement, hidden feedback loops, undeclared consumers, data dependencies, configuration issues, changes in the external world, and a variety of system-level anti-patterns.}, @@ -13387,7 +14814,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {Uncertainty {{Quantification}} of {{Machine Learned Density Functionals}}}, author = {Shah, Karan}, date = {2018-05}, - institution = {{Georgia Institute of Technology}}, + institution = {Georgia Institute of Technology}, url = {http://hdl.handle.net/1853/61364}, urldate = {2023-10-06}, abstract = {Density Functional Theory(DFT) is one of the most popular and successful methods for quantum mechanical simulations of matter because of its relatively lower computational costs. While it is formally exact, approximations of eXchange Correlation(XC) functionals have to be made. These calculations are highly time consuming and scale poorly with system size. The prospect of combining computer vision and deep learning is a fundamentally new approach to designing these XC functionals. This approach combines the intuitive power of physical insight with the flexibility of machine learning and high-quality training data in order to develop new routes to approximating exchange-correlation energies. A parameterized function is first fit on the data and the resulting residuals are used for bootstrap aggregating via an ensemble of neural networks. This two-stage method provides robust uncertainty quantification on the predicted XC energies and can be automated for many systems without significant manual intervention.}, @@ -13396,6 +14823,26 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Shah_2018_Uncertainty Quantification of Machine Learned Density Functionals.pdf} } +@article{shaoMachineLearningElectronic2023, + title = {Machine learning electronic structure methods based on the one-electron reduced density matrix}, + author = {Shao, Xuecheng and Paetow, Lukas and Tuckerman, Mark E. and Pavanello, Michele}, + date = {2023-10-07}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {14}, + number = {1}, + pages = {6281}, + publisher = {Nature Publishing Group}, + issn = {2041-1723}, + doi = {10.1038/s41467-023-41953-9}, + url = {https://www.nature.com/articles/s41467-023-41953-9}, + urldate = {2024-03-08}, + abstract = {The theorems of density functional theory (DFT) establish bijective maps between the local external potential of a many-body system and its electron density, wavefunction and, therefore, one-particle reduced density matrix. Building on this foundation, we show that machine learning models based on the one-electron reduced density matrix can be used to generate surrogate electronic structure methods. We generate surrogates of local and hybrid DFT, Hartree-Fock and full configuration interaction theories for systems ranging from small molecules such as water to more complex compounds like benzene and propanol. The surrogate models use the one-electron reduced density matrix as the central quantity to be learned. From the predicted density matrices, we show that either standard quantum chemistry or a second machine-learning model can be used to compute molecular observables, energies, and atomic forces. 
The surrogate models can generate essentially anything that a standard electronic structure method can, ranging from band gaps and Kohn-Sham orbitals to energy-conserving ab-initio molecular dynamics simulations and infrared spectra, which account for anharmonicity and thermal effects, without the need to employ computationally expensive algorithms such as self-consistent field theory. The algorithms are packaged in an efficient and easy to use Python code, QMLearn, accessible on popular platforms.}, + langid = {english}, + keywords = {\_tablet,AML,ase,B3LYP,configuration interaction,density matrix,DFT,DFT speedup,DFT speedup with ML,GTO basis,HFT,KRR,LDA,library,MD,ML,ML-DFT,ML-ESM,ML-WFT,molecules,multi-step model,prediction from potential,prediction of density matrix,prediction of electron density,prediction of energy,prediction of forces,PySCF,RDMFT,SCF,surrogate model,WFT,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Shao et al_2023_Machine learning electronic structure methods based on the one-electron reduced.pdf} +} + @article{shapeevAccurateRepresentationFormation2017, title = {Accurate Representation of Formation Energies of Crystalline Alloys with Many Components}, author = {Shapeev, A.}, @@ -13487,7 +14934,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, date = {2022-09-19}, journaltitle = {Journal of Chemical Theory and Computation}, shortjournal = {J. Chem. Theory Comput.}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.2c00555}, url = {https://doi.org/10.1021/acs.jctc.2c00555}, @@ -13497,6 +14944,59 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Shmilovich et al_2022_Orbital Mixer.pdf} } +@online{shoghiMoleculesMaterialsPretraining2024, + title = {From {{Molecules}} to {{Materials}}: {{Pre-training Large Generalizable Models}} for {{Atomic Property Prediction}}}, + shorttitle = {From {{Molecules}} to {{Materials}}}, + author = {Shoghi, Nima and Kolluru, Adeesh and Kitchin, John R. and Ulissi, Zachary W. and Zitnick, C. Lawrence and Wood, Brandon M.}, + date = {2024-05-06}, + eprint = {2310.16802}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2310.16802}, + url = {http://arxiv.org/abs/2310.16802}, + urldate = {2024-05-07}, + abstract = {Foundation models have been transformational in machine learning fields such as natural language processing and computer vision. Similar success in atomic property prediction has been limited due to the challenges of training effective models across multiple chemical domains. To address this, we introduce Joint Multi-domain Pre-training (JMP), a supervised pre-training strategy that simultaneously trains on multiple datasets from different chemical domains, treating each dataset as a unique pre-training task within a multi-task framework. Our combined training dataset consists of \$\textbackslash sim\$120M systems from OC20, OC22, ANI-1x, and Transition-1x. We evaluate performance and generalization by fine-tuning over a diverse set of downstream tasks and datasets including: QM9, rMD17, MatBench, QMOF, SPICE, and MD22. JMP demonstrates an average improvement of 59\% over training from scratch, and matches or sets state-of-the-art on 34 out of 40 tasks. Our work highlights the potential of pre-training strategies that utilize diverse data to advance property prediction across chemical domains, especially for low-data tasks. 
Please visit https://nima.sh/jmp for further information.}, + pubstate = {preprint}, + keywords = {Allegro,AML,ANI1-x,benchmarking,fine-tuning,foundation models,JMP,MACE,MatBench,MD17,Meta Research,ML,MLP,MODNet,multi-domain,multi-task learning,OC20,Open Catalyst,pretrained models,QM9,sGDML,SPICE dataset,universal potential}, + file = {/Users/wasmer/Nextcloud/Zotero/Shoghi et al_2024_From Molecules to Materials.pdf;/Users/wasmer/Zotero/storage/XXHLMYBL/2310.html} +} + +@online{simeonInclusionChargeSpin2024, + title = {On the {{Inclusion}} of {{Charge}} and {{Spin States}} in {{Cartesian Tensor Neural Network Potentials}}}, + author = {Simeon, Guillem and Mirarchi, Antonio and Pelaez, Raul P. and Galvelis, Raimondas and De Fabritiis, Gianni}, + date = {2024-03-22}, + eprint = {2403.15073}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2403.15073}, + url = {http://arxiv.org/abs/2403.15073}, + urldate = {2024-03-31}, + abstract = {In this letter, we present an extension to TensorNet, a state-of-the-art equivariant Cartesian tensor neural network potential, allowing it to handle charged molecules and spin states without architectural changes or increased costs. By incorporating these attributes, we address input degeneracy issues, enhancing the model's predictive accuracy across diverse chemical systems. This advancement significantly broadens TensorNet's applicability, maintaining its efficiency and accuracy.}, + pubstate = {preprint}, + keywords = {/unread,AML,equivariant,ML,MLP,molecules,prediction of charge,prediction of energy,prediction of spin state,QMSpin,SPICE dataset,spin,spin-dependent,TensorNet,TorchMDNet}, + file = {/Users/wasmer/Nextcloud/Zotero/Simeon et al_2024_On the Inclusion of Charge and Spin States in Cartesian Tensor Neural Network.pdf;/Users/wasmer/Zotero/storage/U8FJBM8P/2403.html} +} + +@article{simImprovingResultsImproving2022, + title = {Improving {{Results}} by {{Improving Densities}}: {{Density-Corrected Density Functional Theory}}}, + shorttitle = {Improving {{Results}} by {{Improving Densities}}}, + author = {Sim, Eunji and Song, Suhwan and Vuckovic, Stefan and Burke, Kieron}, + date = {2022-04-20}, + journaltitle = {Journal of the American Chemical Society}, + shortjournal = {J. Am. Chem. Soc.}, + volume = {144}, + number = {15}, + pages = {6625--6639}, + publisher = {American Chemical Society}, + issn = {0002-7863}, + doi = {10.1021/jacs.1c11506}, + url = {https://doi.org/10.1021/jacs.1c11506}, + urldate = {2024-01-01}, + abstract = {Density functional theory (DFT) calculations have become widespread in both chemistry and materials, because they usually provide useful accuracy at much lower computational cost than wavefunction-based methods. All practical DFT calculations require an approximation to the unknown exchange-correlation energy, which is then used self-consistently in the Kohn–Sham scheme to produce an approximate energy from an approximate density. Density-corrected DFT is simply the study of the relative contributions to the total energy error. In the vast majority of DFT calculations, the error due to the approximate density is negligible. But with certain classes of functionals applied to certain classes of problems, the density error is sufficiently large as to contribute to the energy noticeably, and its removal leads to much better results. These problems include reaction barriers, torsional barriers involving π-conjugation, halogen bonds, radicals and anions, most stretched bonds, etc. 
In all such cases, use of a more accurate density significantly improves performance, and often the simple expedient of using the Hartree–Fock density is enough. This Perspective explains what DC-DFT is, where it is likely to improve results, and how DC-DFT can produce more accurate functionals. We also outline challenges and prospects for the field.}, + keywords = {charge density,DFT,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Sim et al_2022_Improving Results by Improving Densities.pdf} +} + @article{simmhanSurveyDataProvenance2005, title = {A Survey of Data Provenance in E-Science}, author = {Simmhan, Yogesh L. and Plale, Beth and Gannon, Dennis}, @@ -13519,7 +15019,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Singh, David J. and Nordström, Lars}, date = {2006}, edition = {2}, - publisher = {{Springer US}}, + publisher = {Springer US}, doi = {10.1007/978-0-387-29684-5}, url = {http://link.springer.com/10.1007/978-0-387-29684-5}, urldate = {2023-10-01}, @@ -13559,7 +15059,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {44}, number = {16}, pages = {8578--8583}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.44.8578}, url = {https://link.aps.org/doi/10.1103/PhysRevB.44.8578}, urldate = {2023-09-19}, @@ -13577,7 +15077,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {15}, number = {5}, pages = {3075--3092}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.8b01092}, url = {https://doi.org/10.1021/acs.jctc.8b01092}, @@ -13612,7 +15112,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {6}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-020-00367-7}, url = {https://www.nature.com/articles/s41524-020-00367-7}, @@ -13633,7 +15133,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {1}, pages = {L012002}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevResearch.3.L012002}, url = {https://link.aps.org/doi/10.1103/PhysRevResearch.3.L012002}, urldate = {2022-10-17}, @@ -13649,8 +15149,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Smith, Micah J. 
and Sala, Carles and Kanter, James Max and Veeramachaneni, Kalyan}, date = {2020-06-11}, pages = {785--800}, - publisher = {{ACM}}, - location = {{Portland OR USA}}, + publisher = {ACM}, + location = {Portland OR USA}, doi = {10.1145/3318464.3386146}, url = {https://dl.acm.org/doi/10.1145/3318464.3386146}, urldate = {2021-10-08}, @@ -13669,7 +15169,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {108}, number = {25}, pages = {253002}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.108.253002}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.108.253002}, urldate = {2021-10-15}, @@ -13685,7 +15185,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Data Science}, volume = {Preprint}, pages = {1--42}, - publisher = {{IOS Press}}, + publisher = {IOS Press}, issn = {2451-8484}, doi = {10.3233/DS-210053}, url = {https://content.iospress.com/articles/data-science/ds210053}, @@ -13697,6 +15197,27 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Soiland-Reyes et al_2022_Packaging research artefacts with RO-Crate.pdf;/Users/wasmer/Zotero/storage/X2IWHLC7/ds210053.html} } +@article{sommer3DSCDatasetSuperconductors2023, + title = {{{3DSC}} - a Dataset of Superconductors Including Crystal Structures}, + author = {Sommer, Timo and Willa, Roland and Schmalian, Jörg and Friederich, Pascal}, + date = {2023-11-21}, + journaltitle = {Scientific Data}, + shortjournal = {Sci Data}, + volume = {10}, + number = {1}, + pages = {816}, + publisher = {Nature Publishing Group}, + issn = {2052-4463}, + doi = {10.1038/s41597-023-02721-y}, + url = {https://www.nature.com/articles/s41597-023-02721-y}, + urldate = {2024-02-05}, + abstract = {Data-driven methods, in particular machine learning, can help to speed up the discovery of new materials by finding hidden patterns in existing data and using them to identify promising candidate materials. In the case of superconductors, the use of data science tools is to date slowed down by a lack of accessible data. In this work, we present a new and publicly available superconductivity dataset (‘3DSC’), featuring the critical temperature TC of superconducting materials additionally to tested non-superconductors. In contrast to existing databases such as the SuperCon database which contains information on the chemical composition, the 3DSC is augmented by approximate three-dimensional crystal structures. We perform a statistical analysis and machine learning experiments to show that access to this structural information improves the prediction of the critical temperature TC of materials. Furthermore, we provide ideas and directions for further research to improve the 3DSC. 
We are confident that this database will be useful in applying state-of-the-art machine learning methods to eventually find new superconductors.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,Database,descriptors,disordered,disordered SOAP,ensemble learning,magpie,ML,SOAP,superconductor,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Sommer et al_2023_3DSC - a dataset of superconductors including crystal structures.pdf} +} + @online{sommer3DSCNewDataset2022, title = {{{3DSC}} - {{A New Dataset}} of {{Superconductors Including Crystal Structures}}}, author = {Sommer, Timo and Willa, Roland and Schmalian, Jörg and Friederich, Pascal}, @@ -13709,7 +15230,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-02-15}, abstract = {Data-driven methods, in particular machine learning, can help to speed up the discovery of new materials by finding hidden patterns in existing data and using them to identify promising candidate materials. In the case of superconductors, which are a highly interesting but also a complex class of materials with many relevant applications, the use of data science tools is to date slowed down by a lack of accessible data. In this work, we present a new and publicly available superconductivity dataset ('3DSC'), featuring the critical temperature \$T\_\textbackslash mathrm\{c\}\$ of superconducting materials additionally to tested non-superconductors. In contrast to existing databases such as the SuperCon database which contains information on the chemical composition, the 3DSC is augmented by the approximate three-dimensional crystal structure of each material. We perform a statistical analysis and machine learning experiments to show that access to this structural information improves the prediction of the critical temperature \$T\_\textbackslash mathrm\{c\}\$ of materials. Furthermore, we see the 3DSC not as a finished dataset, but we provide ideas and directions for further research to improve the 3DSC in multiple ways. We are confident that this database will be useful in applying state-of-the-art machine learning methods to eventually find new superconductors.}, pubstate = {preprint}, - keywords = {Database,disordered,disordered SOAP,ensemble learning,magpie,SOAP,superconductor,with-code}, + keywords = {AML,Database,descriptors,disordered,disordered SOAP,ensemble learning,magpie,ML,SOAP,superconductor,with-code,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Sommer et al_2022_3DSC - A New Dataset of Superconductors Including Crystal Structures.pdf;/Users/wasmer/Zotero/storage/JMMVYJCI/2212.html} } @@ -13730,6 +15251,43 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Song et al_2023_DeepSpeed4Science Initiative.pdf;/Users/wasmer/Zotero/storage/9XE4R4E2/2310.html} } +@article{songDensityCorrectedDFTExplained2022, + title = {Density-{{Corrected DFT Explained}}: {{Questions}} and {{Answers}}}, + shorttitle = {Density-{{Corrected DFT Explained}}}, + author = {Song, Suhwan and Vuckovic, Stefan and Sim, Eunji and Burke, Kieron}, + date = {2022-02-08}, + journaltitle = {Journal of Chemical Theory and Computation}, + shortjournal = {J. Chem. 
Theory Comput.}, + volume = {18}, + number = {2}, + pages = {817--827}, + publisher = {American Chemical Society}, + issn = {1549-9618}, + doi = {10.1021/acs.jctc.1c01045}, + url = {https://doi.org/10.1021/acs.jctc.1c01045}, + urldate = {2024-01-01}, + abstract = {HF-DFT, the practice of evaluating approximate density functionals on Hartree–Fock densities, has long been used in testing density functional approximations. Density-corrected DFT (DC-DFT) is a general theoretical framework for identifying failures of density functional approximations by separating errors in a functional from errors in its self-consistent (SC) density. Most modern DFT calculations yield highly accurate densities, but important characteristic classes of calculation have large density-driven errors, including reaction barrier heights, electron affinities, radicals and anions in solution, dissociation of heterodimers, and even some torsional barriers. Here, the HF density (if not spin-contaminated) usually yields more accurate and consistent energies than those of the SC density. We use the term DC(HF)-DFT to indicate DC-DFT using HF densities only in such cases. A recent comprehensive study (J. Chem. Theory Comput. 2021, 17, 1368–1379) of HF-DFT led to many unfavorable conclusions. A reanalysis using DC-DFT shows that DC(HF)-DFT substantially improves DFT results precisely when SC densities are flawed.}, + keywords = {/unread,charge density,DFT,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Song et al_2022_Density-Corrected DFT Explained.pdf} +} + +@online{songOmniPredLanguageModels2024, + title = {{{OmniPred}}: {{Language Models}} as {{Universal Regressors}}}, + shorttitle = {{{OmniPred}}}, + author = {Song, Xingyou and Li, Oscar and Lee, Chansoo and Yang, Bangding and Peng, Daiyi and Perel, Sagi and Chen, Yutian}, + date = {2024-03-04}, + eprint = {2402.14547}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2402.14547}, + url = {http://arxiv.org/abs/2402.14547}, + urldate = {2024-03-31}, + abstract = {Over the broad landscape of experimental design, regression has been a powerful tool to accurately predict the outcome metrics of a system or model given a set of parameters, but has been traditionally restricted to methods which are only applicable to a specific task. In this paper, we propose OmniPred, a framework for training language models as universal end-to-end regressors over \$(x,y)\$ evaluation data from diverse real world experiments. 
Using data sourced from Google Vizier, one of the largest blackbox optimization databases in the world, our extensive experiments demonstrate that through only textual representations of mathematical parameters and values, language models are capable of very precise numerical regression, and if given the opportunity to train over multiple tasks, can significantly outperform traditional regression models.}, + pubstate = {preprint}, + keywords = {/unread,alternative approaches,alternative for equivariance,alternative to GNN,DeepMind,GNN,Google,language models,LLM,model comparison,regression,transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Song et al_2024_OmniPred.pdf;/Users/wasmer/Zotero/storage/5KU8ET6U/2402.html} +} + @inproceedings{souzaProvenanceDataMachine2019, title = {Provenance {{Data}} in the {{Machine Learning Lifecycle}} in {{Computational Science}} and {{Engineering}}}, booktitle = {2019 {{IEEE}}/{{ACM Workflows}} in {{Support}} of {{Large-Scale Science}} ({{WORKS}})}, @@ -13752,7 +15310,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {156}, number = {3}, pages = {809--813}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRev.156.809}, url = {https://link.aps.org/doi/10.1103/PhysRev.156.809}, urldate = {2023-09-19}, @@ -13767,8 +15325,8 @@ Junqi Yin (Oak Ridge National Laboratory)}, author = {Spaldin, Nicola A.}, date = {2010}, edition = {2}, - publisher = {{Cambridge University Press}}, - location = {{Cambridge}}, + publisher = {Cambridge University Press}, + location = {Cambridge}, doi = {10.1017/CBO9780511781599}, url = {https://www.cambridge.org/core/books/magnetic-materials/4C8C2C5DF32C9E8D528E1E8D26381C1F}, urldate = {2022-08-30}, @@ -13817,7 +15375,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, journaltitle = {Frontiers in Materials}, shortjournal = {Front. Mater.}, volume = {5}, - publisher = {{Frontiers}}, + publisher = {Frontiers}, issn = {2296-8016}, doi = {10.3389/fmats.2018.00070}, url = {https://www.frontiersin.org/articles/10.3389/fmats.2018.00070/full#h3}, @@ -13838,7 +15396,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {3}, number = {1}, pages = {015032}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2632-2153}, doi = {10.1088/2632-2153/ac568d}, url = {https://doi.org/10.1088/2632-2153/ac568d}, @@ -13849,12 +15407,30 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Staacke et al_2022_Kernel charge equilibration.pdf} } +@article{stanleyDarkSideGenerosity2023, + title = {The Dark Side of Generosity: {{Employees}} with a Reputation for Giving Are Selectively Targeted for Exploitation}, + shorttitle = {The Dark Side of Generosity}, + author = {Stanley, Matthew L. and Neck, Christopher P. and Neck, Christopher B.}, + date = {2023-09-01}, + journaltitle = {Journal of Experimental Social Psychology}, + shortjournal = {Journal of Experimental Social Psychology}, + volume = {108}, + pages = {104503}, + issn = {0022-1031}, + doi = {10.1016/j.jesp.2023.104503}, + url = {https://www.sciencedirect.com/science/article/pii/S0022103123000604}, + urldate = {2024-01-13}, + abstract = {People endorse generosity as a moral virtue worth exemplifying, and those who acquire reputations for generosity are admired and publicly celebrated. In an organizational context, hiring, retaining, and promoting generous employees can make organizations more appealing to customers, suppliers, and top talent. 
However, using complementary methods and experimental designs with large samples of full-time managers, we find consistent evidence that managers are inclined to take unfair advantage of employees with reputations for generosity, selectively targeting them for exploitation in ways that likely, and ironically, hamper long-term organizational success. This selective targeting of generous employees for exploitation was statistically explained by a problematic assumption: Since they have reputations for generosity, managers assume that, if they had the opportunity, they would have freely volunteered for their own exploitation. We also investigate a possible solution to the targeting of more generous employees for exploitative practices. Merely asking managers to make a judgment about the ethics of an exploitative request eliminates their propensity to target generous employees over other employees for exploitation.}, + keywords = {/unread,ethics,management,psychology,working in science}, + file = {/Users/wasmer/Zotero/storage/P8YS37PB/S0022103123000604.html} +} + @unpublished{steinbachReproducibilityDataScience2022, type = {presentation}, title = {Reproducibility in {{Data Science}} and {{Machine Learning}}}, author = {Steinbach, Peter}, date = {2022-06-09}, - publisher = {{figshare}}, + publisher = {figshare}, doi = {10.6084/m9.figshare.20036651.v1}, url = {https://figshare.com/articles/presentation/Reproducibility_in_Data_Science_and_Machine_Learning/20036651/1}, urldate = {2022-06-09}, @@ -13873,7 +15449,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, urldate = {2023-06-16}, abstract = {In the theoretical and experimental studies of topological states of matter, including the quantum Hall effect; topological insulators, superconductors, and semimetals; twisted bilayer graphene. The course covers advanced theoretical and experimental methods}, langid = {american}, - organization = {{קמפוס IL}}, + organization = {קמפוס IL}, keywords = {/unread}, file = {/Users/wasmer/Zotero/storage/PQ5256JG/weizmann-acd-quantum-topologicalstatesofmatter-en.html} } @@ -13882,7 +15458,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, title = {{{AI}} for {{Science Report}} 2020}, author = {Stevens, Rick and Taylor, Valerie and Nichols, Jeff and Maccabe, Arthur B. and Yelick, Katherine and Brown, David}, date = {2020}, - institution = {{DOE Office of Science}}, + institution = {DOE Office of Science}, url = {https://www.anl.gov/cels/reference/ai-for-science-report-2020}, urldate = {2023-06-28}, abstract = {Argonne, Oak Ridge, and Berkeley national laboratories hosted four AI for Science town halls attended by more than a thousand scientists and engineers from the U.S. Department of Energy (DOE) national laboratories. The goal of the town hall series was to examine scientific opportunities in the areas of artificial intelligence (AI), big data, and high-performance computing (HPC) in the next decade, and to capture the big ideas, grand challenges, and next steps to realizing these opportunities. Sixteen topical expert teams summarized the state of the art, outlined challenges, developed an AI roadmap for the coming decade, and explored opportunities for accelerating progress on that roadmap. 
Following the town halls, an AI for Science Report was compiled, which captures and highlights the important themes that emerged for AI applications in science and outlines the research and infrastructure needed to advance AI methods and techniques for science applications.}, @@ -13901,7 +15477,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {41}, number = {5}, pages = {339--343}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.41.339}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.41.339}, urldate = {2023-09-19}, @@ -13909,6 +15485,42 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Stocks et al_1978_Complete Solution of the Korringa-Kohn-Rostoker.pdf;/Users/wasmer/Zotero/storage/8KJXURZT/PhysRevLett.41.html} } +@article{suEfficientDeterminationHamiltonian2023, + title = {Efficient Determination of the {{Hamiltonian}} and Electronic Properties Using Graph Neural Network with Complete Local Coordinates}, + author = {Su, Mao and Yang, Ji-Hui and Xiang, Hongjun and Gong, Xin-Gao}, + date = {2023-07}, + journaltitle = {Machine Learning: Science and Technology}, + shortjournal = {Mach. Learn.: Sci. Technol.}, + volume = {4}, + number = {3}, + pages = {035010}, + publisher = {IOP Publishing}, + issn = {2632-2153}, + doi = {10.1088/2632-2153/accb26}, + url = {https://dx.doi.org/10.1088/2632-2153/accb26}, + urldate = {2024-04-18}, + abstract = {Despite the successes of machine learning methods in physical sciences, the prediction of the Hamiltonian, and thus the electronic properties, is still unsatisfactory. Based on graph neural network (NN) architecture, we present an extendable NN model to determine the Hamiltonian from ab initio data, with only local atomic structures as inputs. The rotational equivariance of the Hamiltonian is achieved by our complete local coordinates (LCs). The LC information, encoded using a convolutional NN and designed to preserve Hermitian symmetry, is used to map hopping parameters onto local structures. We demonstrate the performance of our model using graphene and SiGe random alloys as examples. We show that our NN model, although trained using small-size systems, can predict the Hamiltonian, as well as electronic properties such as band structures and densities of states for large-size systems within the ab initio accuracy, justifying its extensibility. In combination with the high efficiency of our model, which takes only seconds to get the Hamiltonian of a 1728-atom system, the present work provides a general framework to predict electronic properties efficiently and accurately, which provides new insights into computational physics and will accelerate the research for large-scale materials.}, + langid = {english}, + keywords = {\_tablet,AML,attention,CNN,convolution,equivariant,GNN,ML,ML-DFT,ML-ESM,prediction of bandstructure,prediction of DOS,prediction of Hamiltonian matrix,ResNet,skip connection}, + file = {/Users/wasmer/Nextcloud/Zotero/Su et al_2023_Efficient determination of the Hamiltonian and electronic properties using.pdf} +} + +@article{sunshineChemicalPropertiesGraph2023, + title = {Chemical {{Properties}} from {{Graph Neural Network-Predicted Electron Densities}}}, + author = {Sunshine, Ethan M. and Shuaibi, Muhammed and Ulissi, Zachary W. and Kitchin, John R.}, + date = {2023-11-27}, + journaltitle = {The Journal of Physical Chemistry C}, + shortjournal = {J. Phys. Chem. 
C}, + publisher = {American Chemical Society}, + issn = {1932-7447}, + doi = {10.1021/acs.jpcc.3c06157}, + url = {https://doi.org/10.1021/acs.jpcc.3c06157}, + urldate = {2023-11-30}, + abstract = {According to density functional theory, any chemical property can be inferred from the electron density, making it the most informative attribute of an atomic structure. In this work, we demonstrate the use of established physical methods to obtain important chemical properties from model-predicted electron densities. We introduce graph neural network architectural choices that provide physically relevant and useful electron density predictions. Despite not being trained to predict atomic charges, the model is able to predict atomic charges with an error of an order of magnitude lower than that of a sum of atomic charge densities. Similarly, the model predicts dipole moments with half the error of the sum of the atomic charge densities method. We demonstrate that larger data sets lead to more useful predictions for these tasks. These results pave the way for an alternative path in atomistic machine learning where data-driven approaches and existing physical methods are used in tandem to obtain a variety of chemical properties in an explainable and self-consistent manner.}, + keywords = {AML,dipole moments,GNN,hybrid AI/simulation,library,ML,ML-DFT,ML-ESM,OC20,partial charges,prediction of electron density,SchNet,VASP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Sunshine et al_2023_Chemical Properties from Graph Neural Network-Predicted Electron Densities.pdf;/Users/wasmer/Nextcloud/Zotero/Sunshine et al_2023_Chemical Properties from Graph Neural Network-Predicted Electron Densities2.pdf} +} + @online{suSVNetWhereEquivariance2022, title = {{{SVNet}}: {{Where SO}}(3) {{Equivariance Meets Binarization}} on {{Point Cloud Representation}}}, shorttitle = {{{SVNet}}}, @@ -13934,7 +15546,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {11}, number = {1}, pages = {4428}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-020-17112-9}, url = {https://www.nature.com/articles/s41467-020-17112-9}, @@ -13946,6 +15558,24 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Sutton et al_2020_Identifying domains of applicability of machine learning models for materials.pdf;/Users/wasmer/Zotero/storage/MPRCKUUI/s41467-020-17112-9.html} } +@article{szilvaQuantitativeTheoryMagnetic2023, + title = {Quantitative Theory of Magnetic Interactions in Solids}, + author = {Szilva, Attila and Kvashnin, Yaroslav and Stepanov, Evgeny A. and Nordström, Lars and Eriksson, Olle and Lichtenstein, Alexander I. and Katsnelson, Mikhail I.}, + date = {2023-09-11}, + journaltitle = {Reviews of Modern Physics}, + shortjournal = {Rev. Mod. Phys.}, + volume = {95}, + number = {3}, + pages = {035004}, + publisher = {American Physical Society}, + doi = {10.1103/RevModPhys.95.035004}, + url = {https://link.aps.org/doi/10.1103/RevModPhys.95.035004}, + urldate = {2023-12-14}, + abstract = {This review addresses the method of explicit calculations of interatomic exchange interactions of magnetic materials. This involves exchange mechanisms normally referred to as a Heisenberg exchange, a Dzyaloshinskii-Moriya interaction, and an anisotropic symmetric exchange. 
The connection between microscopic theories of the electronic structure, such as density functional theory and dynamical mean-field theory, and interatomic exchange is examined. The different aspects of extracting information for an effective spin Hamiltonian that involves thousands of atoms, from electronic structure calculations considering significantly fewer atoms (1–50), is highlighted. Examples of exchange interactions of a large group of materials is presented, which involves heavy elements of the 3d period, alloys between transition metals, Heusler compounds, multilayer systems as well as overlayers and adatoms on a substrate, transition metal oxides, 4f elements, magnetic materials in two dimensions, and molecular magnets. Where possible, a comparison to experimental data is made that becomes focused on the magnon dispersion. The influence of relativity is reviewed in a few cases, as is the importance of dynamical correlations. Development to theories that handle out-of-equilibrium conditions is also described here. The review ends with a description of extensions of the theories behind explicit calculations of interatomic exchange to nonmagnetic situations, such as those that describe chemical (charge) order and superconductivity.}, + keywords = {\_tablet,DFT,Dzyaloshinskii–Moriya interaction,educational,exchange interaction,finite-temperature,Green's functions,Heisenberg model,Jij,kinetic exchange,learning material,magnetic interactions,magnetism,physics,quantum magnetism,rec-by-katsumoto,review,SOC,Spin Hamiltonian,spin-dependent,symmetry breaking,transition metals,TRS}, + file = {/Users/wasmer/Nextcloud/Zotero/Szilva et al_2023_Quantitative theory of magnetic interactions in solids.pdf;/Users/wasmer/Zotero/storage/WII6UD6M/RevModPhys.95.html} +} + @article{szlachtaAccuracyTransferabilityGaussian2014, title = {Accuracy and Transferability of {{Gaussian}} Approximation Potential Models for Tungsten}, author = {Szlachta, Wojciech J. and Bartók, Albert P. and Csányi, Gábor}, @@ -13955,7 +15585,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {90}, number = {10}, pages = {104108}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.90.104108}, url = {https://link.aps.org/doi/10.1103/PhysRevB.90.104108}, urldate = {2023-03-12}, @@ -14002,7 +15632,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {13}, number = {1}, pages = {2991}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-30687-9}, url = {https://www.nature.com/articles/s41467-022-30687-9}, @@ -14041,7 +15671,7 @@ Junqi Yin (Oak Ridge National Laboratory)}, volume = {7}, number = {1}, pages = {299}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2052-4463}, doi = {10.1038/s41597-020-00637-5}, url = {https://www.nature.com/articles/s41597-020-00637-5}, @@ -14049,7 +15679,12 @@ Junqi Yin (Oak Ridge National Laboratory)}, abstract = {Materials Cloud is a platform designed to enable open and seamless sharing of resources for computational science, driven by applications in materials modelling. It hosts (1) archival and dissemination services for raw and curated data, together with their provenance graph, (2) modelling services and virtual machines, (3) tools for data analytics, and pre-/post-processing, and (4) educational materials. 
Data is citable and archived persistently, providing a comprehensive embodiment of entire simulation pipelines (calculations performed, codes used, data generated) in the form of graphs that allow retracing and reproducing any computed result. When an AiiDA database is shared on Materials Cloud, peers can browse the interconnected record of simulations, download individual files or the full database, and start their research from the results of the original authors. The infrastructure is agnostic to the specific simulation codes used and can support diverse applications in computational science that transcend its initial materials domain.}, issue = {1}, langid = {english}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Databases;Materials science Subject\_term\_id: databases;materials-science}, + annotation = {Bandiera\_abtest: a\\ +Cc\_license\_type: cc\_by\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Research\\ +Subject\_term: Databases;Materials science\\ +Subject\_term\_id: databases;materials-science}, file = {/Users/wasmer/Nextcloud/Zotero/Talirz et al_2020_Materials Cloud, a platform for open computational science.pdf;/Users/wasmer/Zotero/storage/TEZC6LT2/s41597-020-00637-5.html} } @@ -14086,6 +15721,22 @@ Junqi Yin (Oak Ridge National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Talirz et al_2021_Trends in Atomistic Simulation Software Usage [Article v1.pdf} } +@online{tangEfficientHybridDensity2023, + title = {Efficient Hybrid Density Functional Calculation by Deep Learning}, + author = {Tang, Zechen and Li, He and Lin, Peize and Gong, Xiaoxun and Jin, Gan and He, Lixin and Jiang, Hong and Ren, Xinguo and Duan, Wenhui and Xu, Yong}, + date = {2023-02-16}, + eprint = {2302.08221}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2302.08221}, + url = {http://arxiv.org/abs/2302.08221}, + urldate = {2024-04-18}, + abstract = {Hybrid density functional calculation is indispensable to accurate description of electronic structure, whereas the formidable computational cost restricts its broad application. Here we develop a deep equivariant neural network method (named DeepH-hybrid) to learn the hybrid-functional Hamiltonian from self-consistent field calculations of small structures, and apply the trained neural networks for efficient electronic-structure calculation by passing the self-consistent iterations. The method is systematically checked to show high efficiency and accuracy, making the study of large-scale materials with hybrid-functional accuracy feasible. 
As an important application, the DeepH-hybrid method is applied to study large-supercell Moiré twisted materials, offering the first case study on how the inclusion of exact exchange affects flat bands in the magic-angle twisted bilayer graphene.}, + pubstate = {preprint}, + keywords = {AML,DeepH,hybrid DFT,ML,ML-DFT,ML-ESM,no-code,prediction of Hamiltonian matrix,transfer learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Tang et al_2023_Efficient hybrid density functional calculation by deep learning.pdf;/Users/wasmer/Zotero/storage/E995LV4K/2302.html} +} + @unpublished{tangiralaNeuralNetworkPredictiveModeling2022, title = {Neural-{{Network Predictive Modeling}} of {{Physical Properties}} in {{Binary Magnetic}} and {{Non-Magnetic Alloys}}}, author = {Tangirala, Sairam}, @@ -14094,17 +15745,21 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-09-19}, abstract = {We present a deep learning (DL) approach to reproduce the first principles Density Functional Theory (DFT) based calculations pertaining to macroscopic physical properties of a non-magnetic (CuAu) and a magnetic (FePt) binary alloys. In this study, a neural network (NN) is developed and trained using thousands of theoretically possible lattice configurations obtained from the Locally Self-Consistent Multiple Scattering (LSMS) DFT code [1]. The intrinsic physical properties of alloys like composition ratio, unit-cell structure, spatial charge distributions, Coulombic interactions, etc. are inputted into the NN model structured by the “bag-of-bonds” representation [2]. The NN regression model is trained to capture the relationship between intrinsic parameters and the total energy of the alloys. Although NNs are complex and computationally expensive to train, they are flexible and can effectively pick up nonlinear relationships between inputs and outputs. Our results show that the trained NN model is orders-of-magnitude faster than DFT in inferring the total energy with comparable accuracy [3]. This demonstrates the potential of applying the NN formalism in accelerating the computational studies of condensed matter systems. [1] LSMS. Computer software. https://www.osti.gov//servlets/purl/1420087. Vers. 00. USDOE. 1 Dec. 2017. Web. [2] J. Phys. Chem. Lett. 6, 12, 2326–2331 (2015). [3] J. Phys.: Condens. Matter 33, 084005 (2021). *ST acknowledges funding from Georgia Gwinnett College through its "Educational and Professional Leave" program}, eventtitle = {{{APS March Meeting}} 2022}, - venue = {{Chicago}}, + venue = {Chicago}, keywords = {/unread,todo-tagging}, - annotation = {Authors: - -Sairam Tangirala (Georgia Gwinnett College) - -Massimiliano L Pasini (Oakridge National Laboratory) - -Markus Eisenbach (Oak Ridge National Lab) - -Ying-Wai Li (Los Alamos National Laboratory)}, + annotation = {Authors:\\ +\\ +Sairam Tangirala\\ +(Georgia Gwinnett College)\\ +\\ +Massimiliano L Pasini\\ +(Oakridge National Laboratory)\\ +\\ +Markus Eisenbach\\ +(Oak Ridge National Lab)\\ +\\ +Ying-Wai Li\\ +(Los Alamos National Laboratory)}, file = {/Users/wasmer/Zotero/storage/SGNGMK25/T32.html} } @@ -14147,7 +15802,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-07-01}, abstract = {Despite the increasing relevance of explainable AI, assessing the quality of explanations remains a challenging issue. Due to the high costs associated with human-subject experiments, various proxy metrics are often used to approximately quantify explanation quality. 
Generally, one possible interpretation of the quality of an explanation is its inherent value for teaching a related concept to a student. In this work, we extend artificial simulatability studies to the domain of graph neural networks. Instead of costly human trials, we use explanation-supervisable graph neural networks to perform simulatability studies to quantify the inherent usefulness of attributional graph explanations. We perform an extensive ablation study to investigate the conditions under which the proposed analyses are most meaningful. We additionally validate our methods applicability on real-world graph classification and regression datasets. We find that relevant explanations can significantly boost the sample efficiency of graph neural networks and analyze the robustness towards noise and bias in the explanations. We believe that the notion of usefulness obtained from our proposed simulatability analysis provides a dimension of explanation quality that is largely orthogonal to the common practice of faithfulness and has great potential to expand the toolbox of explanation quality assessments, specifically for graph explanations.}, langid = {english}, - organization = {{arXiv.org}}, + organization = {arXiv.org}, keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Teufel et al_2023_Quantifying the Intrinsic Usefulness of Attributional Explanations for Graph.pdf} } @@ -14160,7 +15815,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {22}, number = {6}, pages = {757--810}, - publisher = {{Taylor \& Francis}}, + publisher = {Taylor \& Francis}, issn = {0001-8732}, doi = {10.1080/00018737300101389}, url = {https://doi.org/10.1080/00018737300101389}, @@ -14174,8 +15829,8 @@ Ying-Wai Li (Los Alamos National Laboratory)}, title = {Development and Application of a Massively Parallel {{KKR Green}} Function Method for Large Scale Systems}, author = {Thieß, Alexander R. and Blügel, Stefan}, date = {2011}, - institution = {{Publikationsserver der RWTH Aachen University}}, - location = {{Aachen}}, + institution = {Publikationsserver der RWTH Aachen University}, + location = {Aachen}, langid = {english}, pagetotal = {173}, keywords = {density functional theory,Dichtefunktional,dilute magnetic semiconductors,Festkörperphysik,KKR-Methode,Korring Kohn Rostoker Green functions,phase change materials,Phase-Change-Technologie,Physik,Supercomputer,supercomputing,Verdünnte magnetische Legierung} @@ -14186,7 +15841,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, author = {Thieß, Alexander R.}, date = {2013}, number = {PreJuSER-19395}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/18578}, urldate = {2022-08-12}, abstract = {The impact of structural and functional materials on society is often overlooked but can in fact hardly be overestimated: In numerous examples, ranging from the improvement of steel to the invention of light emitting diodes, carbon fibers as well as cheaper and larger memories for data storage, novel materials are a key to successfully face global challenges on mobility, energy, communication and sustainability. Most strikingly visible is this influence for technologies based on electronic, optical, and magnetic materials, technologies that revo- lutionize computing and communication excelling mankind into the information age. 
With the miniaturization of devices, made possible by the invention of the transistor and the integrated circuit, enormous and still exponentially growing computing and communication capabilities are fundamentally changing how we interact, work and live. Material science and condensed matter physics are at the heart of the invention, development, design and improvement of novel materials and subsequently of novel physical phenomena and processes and are thus an excellent demonstration of the interdependence of science, technology and society. Advances in modern material design and technology are closely linked to advances in understanding on the basis of condensed matter physics, statistical physics and quantum mechanics of the many particle problem as well as the development of powerful methods. High-performance experimental tools combined with extraordinary progress in theory and computational power provide insight on the microscopic phenomena in materials and have paved new roads towards understanding as well as raising and answering new questions. On the theory side, density functional theory takes a central position in this process. The ab initio description of materials from the first principles of quantum mechanics holds fun- damental and highly valuable information on the interactions and interplay of electrons in solids and contributes such to the advancement of knowledge on the structural, mechanical, optical, thermal, electrical, magnetic, ferroic or transport properties in bulk solids, surfaces, thin films, heterostructures, quantum wells, clusters and molecules. The complicated task to compute material properties on the quantum mechanical level of myriad of atoms in solids became first accessible by exploiting the periodicity of crystalline solids and high symmetry of idealized systems. Density functional theory calculations exploiting the periodic boundary [...] Thieß, Alexander R.}, @@ -14285,7 +15940,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, eprint = {1808.01590}, eprinttype = {arxiv}, eprintclass = {cond-mat}, - publisher = {{arXiv}}, + publisher = {arXiv}, doi = {10.48550/arXiv.1808.01590}, url = {http://arxiv.org/abs/1808.01590}, urldate = {2022-05-18}, @@ -14303,7 +15958,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {13}, number = {11}, pages = {1056--1068}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/nphys4274}, url = {https://www.nature.com/articles/nphys4274}, @@ -14311,7 +15966,11 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {Materials can harbour quantum many-body systems, most typically in the form of strongly correlated electrons in solids, that lead to novel and remarkable functions thanks to emergence—collective behaviours that arise from strong interactions among the elements. These include the Mott transition, high-temperature superconductivity, topological superconductivity, colossal magnetoresistance, giant magnetoelectric effect, and topological insulators. These phenomena will probably be crucial for developing the next-generation quantum technologies that will meet the urgent technological demands for achieving a sustainable and safe society. Dissipationless electronics using topological currents and quantum spins, energy harvesting such as photovoltaics and thermoelectrics, and secure quantum computing and communication are the three major fields of applications working towards this goal. 
Here, we review the basic principles and the current status of the emergent phenomena and functions in materials from the viewpoint of strong correlation and topology.}, issue = {11}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Electronic devices;Electronic properties and materials;Ferroelectrics and multiferroics;Superconducting properties and materials;Topological matter Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroelectrics-and-multiferroics;superconducting-properties-and-materials;topological-matter}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Electronic devices;Electronic properties and materials;Ferroelectrics and multiferroics;Superconducting properties and materials;Topological matter\\ +Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroelectrics-and-multiferroics;superconducting-properties-and-materials;topological-matter}, file = {/Users/wasmer/Nextcloud/Zotero/Tokura et al_2017_Emergent functions of quantum materials.pdf} } @@ -14324,7 +15983,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {121}, number = {5}, pages = {2857--2897}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00297}, url = {https://doi.org/10.1021/acs.chemrev.0c00297}, @@ -14341,7 +16000,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {1}, number = {2}, pages = {126--143}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2522-5820}, doi = {10.1038/s42254-018-0011-5}, url = {https://www.nature.com/articles/s42254-018-0011-5}, @@ -14359,7 +16018,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, date = {2013}, volume = {6}, pages = {i}, - publisher = {{Elsevier}}, + publisher = {Elsevier}, doi = {10.1016/B978-0-444-63314-9.00012-3}, url = {https://linkinghub.elsevier.com/retrieve/pii/B9780444633149000123}, urldate = {2023-07-12}, @@ -14368,6 +16027,27 @@ Ying-Wai Li (Los Alamos National Laboratory)}, keywords = {/unread,learning material,topological,topological insulator} } +@article{torlaiNeuralnetworkQuantumState2018, + title = {Neural-Network Quantum State Tomography}, + author = {Torlai, Giacomo and Mazzola, Guglielmo and Carrasquilla, Juan and Troyer, Matthias and Melko, Roger and Carleo, Giuseppe}, + date = {2018-05}, + journaltitle = {Nature Physics}, + shortjournal = {Nature Phys}, + volume = {14}, + number = {5}, + pages = {447--450}, + publisher = {Nature Publishing Group}, + issn = {1745-2481}, + doi = {10.1038/s41567-018-0048-5}, + url = {https://www.nature.com/articles/s41567-018-0048-5}, + urldate = {2024-02-28}, + abstract = {The experimental realization of increasingly complex synthetic quantum systems calls for the development of general theoretical methods to validate and fully exploit quantum resources. Quantum state tomography (QST) aims to reconstruct the full quantum state from simple measurements, and therefore provides a key tool to obtain reliable analytics1–3. However, exact brute-force approaches to QST place a high demand on computational resources, making them unfeasible for anything except small systems4,5. Here we show how machine learning techniques can be used to perform QST of highly entangled states with more than a hundred qubits, to a high degree of accuracy. 
We demonstrate that machine learning allows one to reconstruct traditionally challenging many-body quantities—such as the entanglement entropy—from simple, experimentally accessible measurements. This approach can benefit existing and future generations of devices ranging from quantum computers to ultracold-atom quantum simulators6–8.}, + issue = {5}, + langid = {english}, + keywords = {experimental science,ML-QMBP,NQS,quantum computing,quantum state tomography,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Torlai et al_2018_Neural-network quantum state tomography.pdf} +} + @article{townsendDataDrivenAccelerationCoupledCluster2019, title = {Data-{{Driven Acceleration}} of the {{Coupled-Cluster Singles}} and {{Doubles Iterative Solver}}}, author = {Townsend, Jacob and Vogiatzis, Konstantinos D.}, @@ -14377,7 +16057,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {10}, number = {14}, pages = {4129--4135}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.9b01442}, url = {https://doi.org/10.1021/acs.jpclett.9b01442}, urldate = {2022-05-13}, @@ -14448,7 +16128,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {571}, number = {7763}, pages = {95--98}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-019-1335-8}, url = {https://www.nature.com/articles/s41586-019-1335-8}, @@ -14470,7 +16150,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {125}, number = {20}, pages = {206401}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.125.206401}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.125.206401}, urldate = {2023-04-11}, @@ -14534,7 +16214,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2021-10-15}, abstract = {The Materials Genome Initiative is a multi-agency initiative designed to create a new era of policy, resources, and infrastructure that support U.S. institutions in the effort to discover, manufacture, and deploy advanced materials twice as fast, at a fraction of the cost.}, langid = {english}, - organization = {{The White House}}, + organization = {The White House}, file = {/Users/wasmer/Zotero/storage/LEWHVD66/Materials Genome Initiative for Global Competitiveness.pdf;/Users/wasmer/Zotero/storage/9KCC6KRJ/mgi.html} } @@ -14547,7 +16227,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {121}, number = {16}, pages = {10142--10186}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c01111}, url = {https://doi.org/10.1021/acs.chemrev.0c01111}, @@ -14564,7 +16244,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, date = {2021}, volume = {34}, pages = {14434--14447}, - publisher = {{Curran Associates, Inc.}}, + publisher = {Curran Associates, Inc.}, url = {https://proceedings.neurips.cc/paper/2021/hash/78f1893678afbeaa90b1fa01b9cfb860-Abstract.html}, urldate = {2022-08-21}, abstract = {Machine learning has enabled the prediction of quantum chemical properties with high accuracy and efficiency, allowing to bypass computationally costly ab initio calculations. Instead of training on a fixed set of properties, more recent approaches attempt to learn the electronic wavefunction (or density) as a central quantity of atomistic systems, from which all other observables can be derived. 
This is complicated by the fact that wavefunctions transform non-trivially under molecular rotations, which makes them a challenging prediction target. To solve this issue, we introduce general SE(3)-equivariant operations and building blocks for constructing deep learning architectures for geometric point cloud data and apply them to reconstruct wavefunctions of atomistic systems with unprecedented accuracy. Our model achieves speedups of over three orders of magnitude compared to ab initio methods and reduces prediction errors by up to two orders of magnitude compared to the previous state-of-the-art. This accuracy makes it possible to derive properties such as energies and forces directly from the wavefunction in an end-to-end manner. We demonstrate the potential of our approach in a transfer learning application, where a model trained on low accuracy reference wavefunctions implicitly learns to correct for electronic many-body interactions from observables computed at a higher level of theory. Such machine-learned wavefunction surrogates pave the way towards novel semi-empirical methods, offering resolution at an electronic level while drastically decreasing computational cost. Additionally, the predicted wavefunctions can serve as initial guess in conventional ab initio methods, decreasing the number of iterations required to arrive at a converged solution, thus leading to significant speedups without any loss of accuracy or robustness. While we focus on physics applications in this contribution, the proposed equivariant framework for deep learning on point clouds is promising also beyond, say, in computer vision or graphics.}, @@ -14581,7 +16261,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {13}, number = {1}, pages = {5183}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2041-1723}, doi = {10.1038/s41467-022-32294-0}, url = {https://www.nature.com/articles/s41467-022-32294-0}, @@ -14589,7 +16269,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {Atomistic modeling of chemically reactive systems has so far relied on either expensive ab initio methods or bond-order force fields requiring arduous parametrization. Here, we describe a Bayesian active learning framework for autonomous “on-the-fly” training of fast and accurate reactive many-body force fields during molecular dynamics simulations. At each time-step, predictive uncertainties of a sparse Gaussian process are evaluated to automatically determine whether additional ab initio training data are needed. We introduce a general method for mapping trained kernel models onto equivalent polynomial models whose prediction cost is much lower and independent of the training set size. As a demonstration, we perform direct two-phase simulations of heterogeneous H2 turnover on the Pt(111) catalyst surface at chemical accuracy. 
The model trains itself in three days and performs at twice the speed of a ReaxFF model, while maintaining much higher fidelity to DFT and excellent agreement with experiment.}, issue = {1}, langid = {english}, - keywords = {\_tablet,/unread,active learning,active learning online,AML,Bayesian methods,FLARE,Gaussian process,GPR,iterative learning,library,MD,ML,MLP,uncertainty quantification,with-code}, + keywords = {/unread,\_tablet,active learning,active learning online,AML,Bayesian methods,FLARE,Gaussian process,GPR,iterative learning,library,MD,ML,MLP,uncertainty quantification,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Vandermause et al_2022_Active learning of reactive Bayesian force fields applied to heterogeneous.pdf} } @@ -14602,7 +16282,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {1}, pages = {1--14}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01104-6}, url = {https://www.nature.com/articles/s41524-023-01104-6}, @@ -14655,7 +16335,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2021-09-23}, abstract = {The official home of the Python Programming Language}, langid = {english}, - organization = {{Python.org}}, + organization = {Python.org}, keywords = {coding style guide,PEP,Python,software engineering}, file = {/Users/wasmer/Zotero/storage/A4H9CLJ5/pep-0008.html} } @@ -14689,8 +16369,8 @@ Ying-Wai Li (Los Alamos National Laboratory)}, date = {2016-06-26}, series = {{{HILDA}} '16}, pages = {1--3}, - publisher = {{Association for Computing Machinery}}, - location = {{New York, NY, USA}}, + publisher = {Association for Computing Machinery}, + location = {New York, NY, USA}, doi = {10.1145/2939502.2939516}, url = {https://doi.org/10.1145/2939502.2939516}, urldate = {2021-10-23}, @@ -14708,7 +16388,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {53}, number = {45}, pages = {453001}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {0022-3727}, doi = {10.1088/1361-6463/ab9d98}, url = {https://doi.org/10.1088/1361-6463/ab9d98}, @@ -14727,7 +16407,6 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-11-18}, abstract = {Deriving graph neural networks (GNNs) from first principles, motivating their use, and explaining how they have emerged along several related research lines. Computer Laboratory Wednesday Seminar, 17 February 2021 Slide deck: https://petar-v.com/talks/GNN-Wednesd... 
Link at Talks.cam: https://talks.cam.ac.uk/talk/index/15...}, eventtitle = {Computer {{Laboratory Wednesday Seminar}}}, - keywords = {/unread}, file = {/Users/wasmer/Nextcloud/Zotero/VeliÄković_2021_Theoretical Foundations of Graph Neural Networks.pdf} } @@ -14740,7 +16419,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {175}, number = {3}, pages = {747--766}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRev.175.747}, url = {https://link.aps.org/doi/10.1103/PhysRev.175.747}, urldate = {2023-09-19}, @@ -14765,6 +16444,20 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Venugopal et al_2022_MatKG.pdf;/Users/wasmer/Zotero/storage/GBSDFV8K/2210.html} } +@book{vershyninHighdimensionalProbabilityIntroduction2018, + title = {High-Dimensional Probability: An Introduction with Applications in Data Science}, + shorttitle = {High-Dimensional Probability}, + author = {Vershynin, Roman}, + date = {2018}, + series = {Cambridge Series in Statistical and Probabilistic Mathematics}, + number = {47}, + publisher = {Cambridge University Press}, + location = {Cambridge ; New York, NY}, + isbn = {978-1-108-41519-4}, + pagetotal = {284}, + keywords = {/unread,data science,educational,high-dimensional,learning material,mathematics,ML theory,online book,probability theory,textbook} +} + @inproceedings{villarScalarsAreUniversal2021, title = {Scalars Are Universal: {{Equivariant}} Machine Learning, Structured like Classical Physics}, shorttitle = {Scalars Are Universal}, @@ -14773,7 +16466,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, date = {2021}, volume = {34}, pages = {28848--28863}, - publisher = {{Curran Associates, Inc.}}, + publisher = {Curran Associates, Inc.}, url = {https://proceedings.neurips.cc/paper/2021/hash/f1b0775946bc0329b35b823b86eeb5f5-Abstract.html}, urldate = {2023-06-30}, keywords = {Einstein summation,equivariant,general ML,group theory,invariance,ML,ML theory,Physics ML,symmetry}, @@ -14825,7 +16518,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {58}, number = {8}, pages = {1200--1211}, - publisher = {{NRC Research Press}}, + publisher = {NRC Research Press}, issn = {0008-4204}, doi = {10.1139/p80-159}, url = {https://cdnsciencepub.com/doi/abs/10.1139/p80-159}, @@ -14834,6 +16527,42 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Vosko et al_1980_Accurate spin-dependent electron liquid correlation energies for local spin.pdf} } +@online{vuckovicTransferableDiversityDatadriven2023, + title = {Transferable Diversity – a Data-Driven Representation of Chemical Space}, + author = {Vuckovic, Stefan and Gould, Tim and Chang, Bun and Dale, Stephen}, + date = {2023-10-03}, + eprinttype = {ChemRxiv}, + doi = {10.26434/chemrxiv-2023-5075x-v2}, + url = {https://chemrxiv.org/engage/chemrxiv/article-details/6511601aed7d0eccc32e3ace}, + urldate = {2024-01-01}, + abstract = {Transferability, especially in the context of model generalization, is a paradigm of all scientific disciplines. However, the rapid advancement of machine learned model development threatens this paradigm, as it can be difficult to understand how transferability is embedded (or missed) in complex models. While transferability in general chemistry machine learning should benefit from diverse training data, a rigorous understanding of transferability together with its interplay with chemical representation remains an open problem. 
We introduce a transferability framework and apply it to a controllable data-driven model for developing density functional approximations (DFAs), an indispensable tool in everyday chemistry research. We reveal that human intuition introduces chemical biases that can hamper the transferability of data-driven DFAs, and we identify strategies for their elimination. We then show that uncritical use of large training sets can actually hinder the transferability of DFAs, in contradiction to typical “more is more” expectations. Finally, our transferability framework yields transferable diversity, a cornerstone principle for data curation for developing general-purpose machine learning models in chemistry}, + langid = {english}, + pubstate = {preprint}, + keywords = {/unread,AML,ML,todo-tagging,transfer learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Vuckovic et al_2023_Transferable diversity – a data-driven representation of chemical space.pdf} +} + +@article{vuckovicUsingAINavigate2023, + title = {Using {{AI}} to Navigate through the {{DFA}} Zoo}, + author = {Vuckovic, Stefan}, + date = {2023-01}, + journaltitle = {Nature Computational Science}, + shortjournal = {Nat Comput Sci}, + volume = {3}, + number = {1}, + pages = {6--7}, + publisher = {Nature Publishing Group}, + issn = {2662-8457}, + doi = {10.1038/s43588-022-00393-z}, + url = {https://www.nature.com/articles/s43588-022-00393-z}, + urldate = {2024-01-01}, + abstract = {A proposed density functional approximation (DFA) recommender outperforms the use of a single functional by selecting the optimal exchange-correlation functional for a given system.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,DFT,ML,ML-DFA,ML-DFT,ML-ESM,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Vuckovic_2023_Using AI to navigate through the DFA zoo.pdf} +} + @article{vuUnderstandingKernelRidge2015, title = {Understanding Kernel Ridge Regression: {{Common}} Behaviors from Simple Functions to Density Functionals}, shorttitle = {Understanding Kernel Ridge Regression}, @@ -14857,8 +16586,8 @@ Ying-Wai Li (Los Alamos National Laboratory)}, title = {Symmetry, Groups, and Representations in Physics}, author = {Vvedensky, Dimitri D. and Evans, Timothy S.}, date = {2010}, - publisher = {{World Scientific}}, - location = {{Singapore}}, + publisher = {World Scientific}, + location = {Singapore}, abstract = {Presents an introduction to symmetry in physics based on discrete and continuous groups. This book includes exercises that illustrate the concepts introduced in the main text, to extend some of the main results, and to introduce fresh ideas. 
It is suitable for both beginning and advanced graduate students.}, isbn = {978-1-84816-371-3}, langid = {english}, @@ -14883,6 +16612,39 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2022_Approximately Equivariant Networks for Imperfectly Symmetric Dynamics.pdf;/Users/wasmer/Zotero/storage/L7W3IW67/2201.html} } +@online{wangDeepH2EnhancingDeeplearning2024, + title = {{{DeepH-2}}: {{Enhancing}} Deep-Learning Electronic Structure via an Equivariant Local-Coordinate Transformer}, + shorttitle = {{{DeepH-2}}}, + author = {Wang, Yuxiang and Li, He and Tang, Zechen and Tao, Honggeng and Wang, Yanzhen and Yuan, Zilong and Chen, Zezhou and Duan, Wenhui and Xu, Yong}, + date = {2024-01-30}, + eprint = {2401.17015}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2401.17015}, + url = {http://arxiv.org/abs/2401.17015}, + urldate = {2024-04-18}, + abstract = {Deep-learning electronic structure calculations show great potential for revolutionizing the landscape of computational materials research. However, current neural-network architectures are not deemed suitable for widespread general-purpose application. Here we introduce a framework of equivariant local-coordinate transformer, designed to enhance the deep-learning density functional theory Hamiltonian referred to as DeepH-2. Unlike previous models such as DeepH and DeepH-E3, DeepH-2 seamlessly integrates the simplicity of local-coordinate transformations and the mathematical elegance of equivariant neural networks, effectively overcoming their respective disadvantages. Based on our comprehensive experiments, DeepH-2 demonstrates superiority over its predecessors in both efficiency and accuracy, showcasing state-of-the-art performance. This advancement opens up opportunities for exploring universal neural network models or even large materials models.}, + pubstate = {preprint}, + keywords = {\_tablet,AML,DeepH,ELCT,Equiformer,equivariant,LCNN,local coordinates,ML,ML-DFT,ML-ESM,prediction of Hamiltonian matrix,transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2024_DeepH-2.pdf;/Users/wasmer/Zotero/storage/WVSPJ8YJ/2401.html} +} + +@online{wangGeneratingMolecularConformer2023, + title = {Generating {{Molecular Conformer Fields}}}, + author = {Wang, Yuyang and Elhag, Ahmed A. and Jaitly, Navdeep and Susskind, Joshua M. and Bautista, Miguel Angel}, + date = {2023-11-27}, + eprint = {2311.17932}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2311.17932}, + url = {http://arxiv.org/abs/2311.17932}, + urldate = {2023-12-04}, + abstract = {In this paper we tackle the problem of generating conformers of a molecule in 3D space given its molecular graph. We parameterize these conformers as continuous functions that map elements from the molecular graph to points in 3D space. We then formulate the problem of learning to generate conformers as learning a distribution over these functions using a diffusion generative model, called Molecular Conformer Fields (MCF). Our approach is simple and scalable, and achieves state-of-the-art performance on challenging molecular conformer generation benchmarks while making no assumptions about the explicit structure of molecules (e.g. modeling torsional angles). 
MCF represents an advance in extending diffusion models to handle complex scientific problems in a conceptually simple, scalable and effective manner.}, + pubstate = {preprint}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2023_Generating Molecular Conformer Fields.pdf;/Users/wasmer/Zotero/storage/HPM6A9N3/2311.html} +} + @online{wangGraphNetsPartial2019, title = {Graph {{Nets}} for {{Partial Charge Prediction}}}, author = {Wang, Yuanqing and Fass, Josh and Stern, Chaya D. and Luo, Kun and Chodera, John}, @@ -14899,6 +16661,26 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2019_Graph Nets for Partial Charge Prediction.pdf;/Users/wasmer/Zotero/storage/5MD2WVP3/1909.html} } +@article{wangHeterogeneousRelationalMessage2022, + title = {Heterogeneous Relational Message Passing Networks for Molecular Dynamics Simulations}, + author = {Wang, Zun and Wang, Chong and Zhao, Sibo and Xu, Yong and Hao, Shaogang and Hsieh, Chang Yu and Gu, Bing-Lin and Duan, Wenhui}, + date = {2022-03-31}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {8}, + number = {1}, + pages = {1--9}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-022-00739-1}, + url = {https://www.nature.com/articles/s41524-022-00739-1}, + urldate = {2024-04-18}, + abstract = {With many frameworks based on message passing neural networks proposed to predict molecular and bulk properties, machine learning methods have tremendously shifted the paradigms of computational sciences underpinning physics, material science, chemistry, and biology. While existing machine learning models have yielded superior performances in many occasions, most of them model and process molecular systems in terms of homogeneous graph, which severely limits the expressive power for representing diverse interactions. In practice, graph data with multiple node and edge types is ubiquitous and more appropriate for molecular systems. Thus, we propose the heterogeneous relational message passing network (HermNet), an end-to-end heterogeneous graph neural networks, to efficiently express multiple interactions in a single model with ab initio accuracy. HermNet performs impressively against many top-performing models on both molecular and extended systems. Specifically, HermNet outperforms other tested models in nearly 75\%, 83\% and 69\% of tasks on revised Molecular Dynamics 17 (rMD17), Quantum Machines 9 (QM9) and extended systems datasets, respectively. In addition, molecular dynamics simulations and material property calculations are performed with HermNet to demonstrate its performance. Finally, we elucidate how the design of HermNet is compatible with quantum mechanics from the perspective of the density functional theory. 
Besides, HermNet is a universal framework, whose sub-networks could be replaced by other advanced models.}, + langid = {english}, + keywords = {AML,heterogeneous GNN,MD,MD17,ML,MPNN,PAiNN,QM9}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2022_Heterogeneous relational message passing networks for molecular dynamics.pdf} +} + @online{wangIntrinsicMagneticTopological2022, title = {Intrinsic {{Magnetic Topological Materials}}}, author = {Wang, Yuan and Zhang, Fayuan and Zeng, Meng and Sun, Hongyi and Hao, Zhanyang and Cai, Yongqing and Rong, Hongtao and Zhang, Chengcheng and Liu, Cai and Ma, Xiaoming and Wang, Le and Guo, Shu and Lin, Junhao and Liu, Qihang and Liu, Chang and Chen, Chaoyu}, @@ -14943,7 +16725,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {1}, pages = {59}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2052-4463}, doi = {10.1038/s41597-022-01158-z}, url = {https://www.nature.com/articles/s41597-022-01158-z}, @@ -14965,7 +16747,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {32}, number = {12}, pages = {4954--4965}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.0c01907}, url = {https://doi.org/10.1021/acs.chemmater.0c01907}, @@ -14999,7 +16781,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {75}, number = {15}, pages = {2867--2870}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.75.2867}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.75.2867}, urldate = {2023-09-19}, @@ -15028,7 +16810,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {620}, number = {7972}, pages = {47--60}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/s41586-023-06221-2}, url = {https://www.nature.com/articles/s41586-023-06221-2}, @@ -15040,6 +16822,24 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2023_Scientific discovery in the age of artificial intelligence.pdf} } +@article{wangSymmetryadaptedGraphNeural2021, + title = {Symmetry-Adapted Graph Neural Networks for Constructing Molecular Dynamics Force Fields}, + author = {Wang, Zun and Wang, Chong and Zhao, SiBo and Du, ShiQiao and Xu, Yong and Gu, Bing-Lin and Duan, WenHui}, + date = {2021-09-28}, + journaltitle = {Science China Physics, Mechanics \& Astronomy}, + shortjournal = {Sci. China Phys. Mech. Astron.}, + volume = {64}, + number = {11}, + pages = {117211}, + issn = {1869-1927}, + doi = {10.1007/s11433-021-1739-4}, + url = {https://doi.org/10.1007/s11433-021-1739-4}, + urldate = {2024-04-18}, + abstract = {Molecular dynamics is a powerful simulation tool to explore material properties. Most realistic material systems are too large to be simulated using first-principles molecular dynamics. Classical molecular dynamics has a lower computational cost but requires accurate force fields to achieve chemical accuracy. In this work, we develop a symmetry-adapted graph neural network framework called the molecular dynamics graph neural network (MDGNN) to construct force fields automatically for molecular dynamics simulations for both molecules and crystals. This architecture consistently preserves translation, rotation, and permutation invariance in the simulations. 
We also propose a new feature engineering method that includes high-order terms of interatomic distances and demonstrate that the MDGNN accurately reproduces the results of both classical and first-principles molecular dynamics. In addition, we demonstrate that force fields constructed by the proposed model have good transferability. The MDGNN is thus an efficient and promising option for performing molecular dynamics simulations of large-scale systems with high accuracy.}, + langid = {english}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2021_Symmetry-adapted graph neural networks for constructing molecular dynamics.pdf} +} + @online{wangSymmetrybasedComputationalSearch2022, title = {Symmetry-Based Computational Search for Novel Binary and Ternary {{2D}} Materials}, author = {Wang, Hai-Chen and Schmidt, Jonathan and Marques, Miguel A. L. and Wirtz, Ludger and Romero, Aldo H.}, @@ -15084,7 +16884,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {16}, number = {11}, pages = {1062--1067}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4660}, doi = {10.1038/nmat5012}, url = {https://www.nature.com/articles/nmat5012}, @@ -15092,7 +16892,11 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {Topological states of quantum matter have been investigated intensively in recent years in materials science and condensed matter physics. The field developed explosively largely because of the precise theoretical predictions, well-controlled materials processing, and novel characterization techniques. In this Perspective, we review recent progress in topological insulators, the quantum anomalous Hall effect, chiral topological superconductors, helical topological superconductors and Weyl semimetals.}, issue = {11}, langid = {english}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Reviews Subject\_term: Electronic properties and materials;Quantum Hall;Superconducting properties and materials;Topological matter Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconducting-properties-and-materials;topological-matter}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Reviews\\ +Subject\_term: Electronic properties and materials;Quantum Hall;Superconducting properties and materials;Topological matter\\ +Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconducting-properties-and-materials;topological-matter}, file = {/Users/wasmer/Nextcloud/Zotero/Wang_Zhang_2017_Topological states of condensed matter.pdf} } @@ -15105,7 +16909,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {14}, number = {1}, pages = {21--25}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1936-0851}, doi = {10.1021/acsnano.9b00184}, url = {https://doi.org/10.1021/acsnano.9b00184}, @@ -15122,7 +16926,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {2}, number = {1}, pages = {1--7}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/npjcompumats.2016.28}, url = {https://www.nature.com/articles/npjcompumats201628}, @@ -15162,7 +16966,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {29}, number = {19}, pages = {8346--8360}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0897-4756}, doi = {10.1021/acs.chemmater.7b02766}, url = 
{https://doi.org/10.1021/acs.chemmater.7b02766}, @@ -15180,7 +16984,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, url = {https://github.com/JuDFTteam/aiida-jutools}, urldate = {2022-10-16}, abstract = {Tools for managing high-throughput experiments with AiiDA.}, - organization = {{JuDFTteam}} + organization = {JuDFTteam} } @unpublished{wasmerComparisonStructuralRepresentations2021, @@ -15193,16 +16997,16 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {Wasmer, Johannes; Rüßmann, Philipp; Blügel, Stefan}, eventtitle = {{{DPG SKM21}}}, langid = {english}, - venue = {{online}}, + venue = {online}, file = {/Users/wasmer/Zotero/storage/RJIXGQPU/901958.html} } @thesis{wasmerDevelopmentSurrogateMachine2021, type = {mathesis}, title = {Development of a Surrogate Machine Learning Model for the Acceleration of Density Functional Calculations with the {{Korringa-Kohn-Rostoker}} Method}, - author = {Wasmer, Johannes}, + author = {Wasmer, Johannes and Rüßmann, Philipp and Blügel, Stefan}, date = {2021-10-27}, - institution = {{RWTH Aachen University}}, + institution = {RWTH Aachen University}, url = {https://iffgit.fz-juelich.de/phd-project-wasmer/theses/master-thesis}, urldate = {2022-08-08}, langid = {english}, @@ -15232,12 +17036,12 @@ Ying-Wai Li (Los Alamos National Laboratory)}, title = {Equivariant and {{Coordinate Independent Convolutional Networks}} - {{A Gauge Field Theory}} of {{Neural Networks}}}, author = {Weiler, Maurice and Forré, Patrick and Verlinde, Erik and Welling, Max}, date = {2023}, - publisher = {{self-published}}, + publisher = {self-published}, url = {https://maurice-weiler.gitlab.io/#cnn_book}, urldate = {2023-11-18}, abstract = {In this book, Equivariant and Coordinate Independent Convolutional Networks, we develop a gauge theory of artificial neural networks for processing spatially structured data like images, audio, or videos. The standard neural network architecture for such data are convolutional networks, which are characterized by their position-independent inference. Generalizing whatever they learn over spatial locations, convolutional networks are substantially more data efficient and robust in comparison to non-convolutional models. This characteristic is especially important in domains like medical imaging, where training data is scarce. The independence from spatial locations is formally captured by the networks’ translation group equivariance, i.e. their property to commute with translations of their input signals. We show that the convolutional network design is not only sufficient for translation equivariance but is actually a necessary condition – convolutions can therefore be derived by demanding the model’s equivariance. The first part of this work leverages this insight to define generalized convolutional networks which are equivariant under larger symmetry groups. Such models generalize their inference over additional geometric transformations, for instance, rotations or reflections of patterns in images. We demonstrate empirically that they exhibit a significantly enhanced data efficiency, convergence rate, and final performance in comparison to conventional convolutional networks. Our publicly available implementation found wide use in the research community. In the second part, we extend convolutional networks further to process signals on Riemannian manifolds. 
Beyond flat Euclidean images, this setting includes, e.g., spherical signals like global weather patterns on the earth’s surface, or signals on general surfaces like artery walls or the cerebral cortex. We show that convolution kernels on manifolds are required to be equivariant under local gauge transformations if the networks’ inference is demanded to be coordinate independent. The resulting coordinate independent networks are proven to be equivariant with respect to the manifolds’ global symmetries (isometries). Our objective is not to propose yet another equivariant network design for a narrow application domain, but to devise a unifying mathematical framework for convolutional networks. The last part of this book demonstrates the generality of our differential geometric formulation of convolutional networks by showing that is able to explain a vast number of equivariant network architectures from the literature.}, langid = {american}, - keywords = {/unread,CNN,covariant,educational,equivariant,gauge theory,General ML,geometric deep learning,GNN,group theory,invariance,learning material,ML,ML theory,online book,physics-informed ML,review,review-of-GDL,steerable CNN,symmetry,textbook}, + keywords = {\_tablet,CNN,covariant,educational,equivariant,gauge theory,General ML,geometric deep learning,GNN,group theory,invariance,learning material,ML,ML theory,online book,physics-informed ML,review,review-of-GDL,steerable CNN,symmetry,textbook}, file = {/Users/wasmer/Nextcloud/Zotero/Weiler et al_2023_Equivariant and Coordinate Independent Convolutional Networks - A Gauge Field.pdf;/Users/wasmer/Zotero/storage/U2AEW2RU/maurice-weiler.gitlab.io.html} } @@ -15267,7 +17071,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {22}, number = {11}, pages = {2433--2439}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0022-2488}, doi = {10.1063/1.524800}, url = {https://aip.scitation.org/doi/10.1063/1.524800}, @@ -15318,7 +17122,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {19}, number = {8}, pages = {2149--2160}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.2c01235}, url = {https://doi.org/10.1021/acs.jctc.2c01235}, @@ -15336,7 +17140,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-11-11}, abstract = {Everything tangible in the universe is made of molecules. Yet our ability to digitally simulate even small molecules is rather poor due to the complexities of quantum mechanics. However, there are a number of advances that are converging to dramatically improve our ability to understand the behavior of molecules. Firstly, deep learning and in particular equivariant graph neural networks are now an important tool to model molecules. They are for instance the core technology in Deepmind’s AlphaFold to predict the 3d shape of a molecule from its amino acid sequence. Second, despite claims to the contrary, Moore’s law is still alive, and in particular the design of ASIC architectures for special purpose computation will continue to accelerate our ability to break new computational barriers. And finally there is the rapid advance of quantum computation. While fault tolerant quantum computation might still be a decade away, it is expected that it’s first useful application, to simulate (quantum) nature itself, may be much closer. 
In this talk I will introduce some technology around equivariant graph neural networks and give my perspective on why I am excited about the opportunities that will come from new breakthroughs in molecular simulation. It may facilitate the search for new sustainable technologies to capture carbon from the air, develop biodegradable plastics, reduce the cost of electrolysis through better catalysts, develop cleaner and cheaper fertilizers, design new drugs to treat disease and so on. Our understanding of matter will be key to unlocking these new materials for the benefit of humanity.}, langid = {american}, - venue = {{Valence Labs - M2D2 reading group}}, + venue = {Valence Labs - M2D2 reading group}, keywords = {/unread,AI4Science,AML,biomolecules,GNN,Microsoft Research,ML,molecules,neural operator,PDE,review,review-of-AI4science}, file = {/Users/wasmer/Zotero/storage/HF88TM6X/converging-advances-to-accelerate-molecular-simulation-EvHb5ThyGLXPJBp.html} } @@ -15350,7 +17154,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {121}, number = {16}, pages = {9873--9926}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00749}, url = {https://doi.org/10.1021/acs.chemrev.0c00749}, @@ -15383,7 +17187,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {2}, number = {2}, pages = {368--376}, - publisher = {{RSC}}, + publisher = {RSC}, issn = {2635-098X}, doi = {10.1039/D2DD00087C}, url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d2dd00087c}, @@ -15420,7 +17224,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, journaltitle = {Nature Reviews Chemistry}, shortjournal = {Nat Rev Chem}, pages = {1--2}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-3358}, doi = {10.1038/s41570-023-00502-0}, url = {https://www.nature.com/articles/s41570-023-00502-0}, @@ -15439,7 +17243,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-10-07}, abstract = {Andrew White is a Member of Technical Staff, Future House The Applied Machine Learning Days channel features talks and performances from the Applied Machine Learning Days held at the EPFL. AMLD is one of the largest machine learning \& AI events in Europe, focused specifically on the applications of machine learning and AI, making it particularly interesting to industry and academia. Follow AMLD: on Twitter: https://www.twitter.com/appliedmldays on LinkedIn: https://www.linkedin.com/company/appl... 
on Mastodon : https://mastodon.social/@amld AMLD Website: https://www.appliedmldays.org}, eventtitle = {Applied {{Machine Learning Days}}}, - venue = {{EPFL, Lausanne, Switzerland}}, + venue = {EPFL, Lausanne, Switzerland}, keywords = {/unread,chemistry,equivariant,foundation models,GNN,LLM,pretrained models} } @@ -15469,7 +17273,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {116}, number = {9}, pages = {3401--3406}, - publisher = {{Proceedings of the National Academy of Sciences}}, + publisher = {Proceedings of the National Academy of Sciences}, doi = {10.1073/pnas.1816132116}, url = {https://www.pnas.org/doi/10.1073/pnas.1816132116}, urldate = {2022-08-16}, @@ -15486,7 +17290,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {3}, number = {1}, pages = {160018}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2052-4463}, doi = {10.1038/sdata.2016.18}, url = {https://www.nature.com/articles/sdata201618}, @@ -15495,7 +17299,11 @@ Ying-Wai Li (Los Alamos National Laboratory)}, issue = {1}, langid = {english}, keywords = {FAIR,original publication}, - annotation = {Bandiera\_abtest: a Cg\_type: Nature Research Journals Primary\_atype: Comments \& Opinion Subject\_term: Publication characteristics;Research data Subject\_term\_id: publication-characteristics;research-data}, + annotation = {Bandiera\_abtest: a\\ +Cg\_type: Nature Research Journals\\ +Primary\_atype: Comments \& Opinion\\ +Subject\_term: Publication characteristics;Research data\\ +Subject\_term\_id: publication-characteristics;research-data}, file = {/Users/wasmer/Nextcloud/Zotero/Wilkinson et al_2016_The FAIR Guiding Principles for scientific data management and stewardship.pdf;/Users/wasmer/Zotero/storage/7QCVD3LB/sdata201618.html} } @@ -15508,7 +17316,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {150}, number = {15}, pages = {154110}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/1.5090481}, url = {https://aip.scitation.org/doi/10.1063/1.5090481}, @@ -15527,9 +17335,9 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {20}, number = {47}, pages = {29661--29668}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {1463-9084}, - doi = {(}, + doi = {10.1039/C8CP05921G}, url = {https://pubs.rsc.org/en/content/articlelanding/2018/cp/c8cp05921g}, urldate = {2021-05-13}, abstract = {Machine-learning of atomic-scale properties amounts to extracting correlations between structure, composition and the quantity that one wants to predict. Representing the input structure in a way that best reflects such correlations makes it possible to improve the accuracy of the model for a given amount of reference data. When using a description of the structures that is transparent and well-principled, optimizing the representation might reveal insights into the chemistry of the data set. Here we show how one can generalize the SOAP kernel to introduce a distance-dependent weight that accounts for the multi-scale nature of the interactions, and a description of correlations between chemical species. We show that this improves substantially the performance of ML models of molecular and materials stability, while making it easier to work with complex, multi-component systems and to extend SOAP to coarse-grained intermolecular potentials. 
The element correlations that give the best performing model show striking similarities with the conventional periodic table of the elements, providing an inspiring example of how machine learning can rediscover, and generalize, intuitive concepts that constitute the foundations of chemistry.}, @@ -15577,7 +17385,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {33}, number = {7}, pages = {777--795}, - publisher = {{Cambridge University Press}}, + publisher = {Cambridge University Press}, issn = {0884-2914, 2044-5326}, doi = {10.1557/jmr.2017.462}, url = {https://www.cambridge.org/core/journals/journal-of-materials-research/article/orbitalfree-density-functional-theory-for-materials-research/49FCB1F2856649AA431E803AE340674E}, @@ -15601,7 +17409,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-04-04}, abstract = {The impact of magnetism on predicted atomic short-range order in Ni-based high-entropy alloys is studied using a first-principles, all-electron, Landau-type linear response theory, coupled with lattice-based atomistic modelling. We perform two sets of linear-response calculations: one in which the paramagnetic state is modelled within the disordered local moment picture, and one in which systems are modelled in a magnetically ordered state. We show that the treatment of magnetism can have significant impact both on the predicted temperature of atomic ordering and also the nature of atomic order itself. In CrCoNi, we find that the nature of atomic order changes from being \$L1\_2\$-like when modelled in the paramagnetic state to MoPt\$\_2\$-like when modelled assuming the system has magnetically ordered. In CrFeCoNi, atomic correlations between Fe and the other elements present are dramatically strengthened when we switch from treating the system as magnetically disordered to magnetically ordered. Our results show it is necessary to consider the magnetic state when modelling multicomponent alloys containing mid- to late-\$3d\$ elements. Further, we suggest that there may be high-entropy alloy compositions containing \$3d\$ transition metals that will exhibit specific atomic short-range order when thermally treated in an applied magnetic field.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,CPA,DFT,high-entropy alloys,KKR,n-ary alloys,transition metals}, + keywords = {/unread,\_tablet,CPA,DFT,high-entropy alloys,KKR,n-ary alloys,transition metals}, file = {/Users/wasmer/Nextcloud/Zotero/Woodgate et al_2023_Interplay between magnetism and short-range order in Ni-based high-entropy.pdf;/Users/wasmer/Zotero/storage/RAQ822ZS/2303.html} } @@ -15613,7 +17421,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2022-10-05}, abstract = {If only scientists understood exactly how electrons act in molecules, they’d be able to predict the behavior of everything from experimental drugs to high-temperature superconductors.}, langid = {english}, - organization = {{Quanta Magazine}}, + organization = {Quanta Magazine}, keywords = {DeepMind,DFT,DM21,for introductions,ML-DFA,ML-DFT,ML-ESM,molecules,popular science}, file = {/Users/wasmer/Zotero/storage/Y7SURWT5/quantum-complexity-tamed-by-machine-learning-20220207.html} } @@ -15626,7 +17434,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, url = {https://zenodo.org/record/7891361}, urldate = {2023-05-08}, abstract = {FLEUR is an all-electron DFT code based on the full-potential linearized augmented plane-wave method (FLAPW). 
It is mainly developed at the Forschungsentrum Jülich, Germany and available for the materials research community.}, - organization = {{Forschungsentrum Jülich}}, + organization = {Forschungszentrum Jülich}, version = {MaX-R6.2}, keywords = {All-electron,Density functional theory,DFT,FLAPW,FLEUR}, file = {/Users/wasmer/Zotero/storage/KYZY7YXB/7891361.html} } @@ -15659,7 +17467,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {2}, pages = {513--530}, - publisher = {{The Royal Society of Chemistry}}, + publisher = {The Royal Society of Chemistry}, issn = {2041-6539}, doi = {10.1039/C7SC02664A}, url = {https://pubs.rsc.org/en/content/articlelanding/2018/sc/c7sc02664a}, @@ -15679,7 +17487,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {5}, number = {1}, pages = {1--10}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2397-2106}, doi = {10.1038/s41529-020-00148-z}, url = {https://www.nature.com/articles/s41529-020-00148-z}, @@ -15688,7 +17496,12 @@ Ying-Wai Li (Los Alamos National Laboratory)}, issue = {1}, langid = {english}, keywords = {descriptor comparison,descriptors,dimensionality reduction,KRR,materials discovery,ML,models,sketchmap,SOAP}, - annotation = {Bandiera\_abtest: a Cc\_license\_type: cc\_by Cg\_type: Nature Research Journals Primary\_atype: Research Subject\_term: Computational methods;Corrosion;Mathematics and computing;Theoretical chemistry Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;theoretical-chemistry}, + annotation = {Bandiera\_abtest: a\\
+Cc\_license\_type: cc\_by\\
+Cg\_type: Nature Research Journals\\
+Primary\_atype: Research\\
+Subject\_term: Computational methods;Corrosion;Mathematics and computing;Theoretical chemistry\\
+Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;theoretical-chemistry}, file = {/Users/wasmer/Nextcloud/Zotero/Würger et al_2021_Exploring structure-property relationships in magnesium dissolution modulators.pdf;/Users/wasmer/Zotero/storage/NM6RVQRY/s41529-020-00148-z.html} } @@ -15707,7 +17520,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, url = {https://ieeexplore.ieee.org/document/9416834/}, urldate = {2023-11-14}, abstract = {Graphs are widely used as a popular representation of the network structure of connected data. Graph data can be found in a broad spectrum of application domains such as social systems, ecosystems, biological networks, knowledge graphs, and information systems. With the continuous penetration of artificial intelligence technologies, graph learning (i.e., machine learning on graphs) is gaining attention from both researchers and practitioners. Graph learning proves effective for many tasks, such as classification, link prediction, and matching. Generally, graph learning methods extract relevant features of graphs by taking advantage of machine learning algorithms. In this survey, we present a comprehensive overview on the state-of-the-art of graph learning. Special attention is paid to four categories of existing graph learning methods, including graph signal processing, matrix factorization, random walk, and deep learning. Major models and algorithms under these categories are reviewed, respectively. We examine graph learning applications in areas such as text, images, science, knowledge graphs, and combinatorial optimization. 
In addition, we discuss several promising research directions in this field.}, - keywords = {/unread,General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, + keywords = {General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, file = {/Users/wasmer/Nextcloud/Zotero/Xia et al_2021_Graph Learning.pdf} } @@ -15720,7 +17533,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {120}, number = {14}, pages = {145301}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.120.145301}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.120.145301}, urldate = {2022-09-27}, @@ -15738,7 +17551,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {100}, number = {17}, pages = {174513}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.100.174513}, url = {https://link.aps.org/doi/10.1103/PhysRevB.100.174513}, urldate = {2023-05-06}, @@ -15777,7 +17590,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {8}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-021-00666-7}, url = {https://www.nature.com/articles/s41524-021-00666-7}, @@ -15828,7 +17641,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {1}, pages = {1--9}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01092-7}, url = {https://www.nature.com/articles/s41524-023-01092-7}, @@ -15856,6 +17669,23 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Xu et al_2020_Survey on Multi-Output Learning.pdf;/Users/wasmer/Zotero/storage/9TWMMATA/8892612.html} } +@article{yagerDomainspecificChatbotsScience2023, + title = {Domain-Specific Chatbots for Science Using Embeddings}, + author = {Yager, Kevin G.}, + date = {2023-10-10}, + journaltitle = {Digital Discovery}, + shortjournal = {Digital Discovery}, + publisher = {RSC}, + issn = {2635-098X}, + doi = {10.1039/D3DD00112A}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d3dd00112a}, + urldate = {2023-12-03}, + abstract = {Large language models (LLMs) have emerged as powerful machine-learning systems capable of handling a myriad of tasks. Tuned versions of these systems have been turned into chatbots that can respond to user queries on a vast diversity of topics, providing informative and creative replies. However, their application to physical science research remains limited owing to their incomplete knowledge in these areas, contrasted with the needs of rigor and sourcing in science domains. Here, we demonstrate how existing methods and software tools can be easily combined to yield a domain-specific chatbot. The system ingests scientific documents in existing formats, and uses text embedding lookup to provide the LLM with domain-specific contextual information when composing its reply. We similarly demonstrate that existing image embedding methods can be used for search and retrieval across publication figures. 
These results confirm that LLMs are already suitable for use by physical scientists in accelerating their research efforts.}, + langid = {english}, + keywords = {AI4Science,AML,chatbot,ChatGPT,embedding,GPT,language models,library,LLM,ML,multimodal input,nanomaterials,NLP,vector database,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Yager_2023_Domain-specific chatbots for science using embeddings.pdf;/Users/wasmer/Zotero/storage/KSR4CDBN/Yager - 2023 - Domain-specific chatbots for science using embeddi.pdf} +} + @article{yamadaPredictingMaterialsProperties2019, title = {Predicting {{Materials Properties}} with {{Little Data Using Shotgun Transfer Learning}}}, author = {Yamada, Hironao and Liu, Chang and Wu, Stephen and Koyama, Yukinori and Ju, Shenghong and Shiomi, Junichiro and Morikawa, Junko and Yoshida, Ryo}, @@ -15865,7 +17695,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {5}, number = {10}, pages = {1717--1730}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {2374-7943}, doi = {10.1021/acscentsci.9b00804}, url = {https://doi.org/10.1021/acscentsci.9b00804}, @@ -15884,7 +17714,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {102}, number = {21}, pages = {214439}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.102.214439}, url = {https://link.aps.org/doi/10.1103/PhysRevB.102.214439}, urldate = {2023-03-13}, @@ -15902,7 +17732,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {91}, number = {9}, pages = {093703}, - publisher = {{The Physical Society of Japan}}, + publisher = {The Physical Society of Japan}, issn = {0031-9015}, doi = {10.7566/JPSJ.91.093703}, url = {https://journals.jps.jp/doi/full/10.7566/JPSJ.91.093703}, @@ -15935,13 +17765,13 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {107}, number = {1}, pages = {014407}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.107.014407}, url = {https://link.aps.org/doi/10.1103/PhysRevB.107.014407}, urldate = {2023-03-22}, abstract = {We present a systematic method to automatically generate symmetry-adapted magnetic structures for a given crystal structure and general propagation vector k as an efficient approach of the analysis of complex modulated magnetic structures. The method is developed as an extension of the generation scheme based on the multipole expansion, which was demonstrated only for the propagation vector k=0 [M.-T. Suzuki et al., Phys. Rev. B 99, 174407 (2019)]. The symmetry-adapted magnetic structures characterized with an ordering vector k are obtained by mapping the multipole magnetic alignments on a virtual cluster to the periodic crystal structure with the phase factor for the wave vector k. This method provides all magnetic bases compatible with irreducible representations under a k group for a given crystal structure and wave vector k. Multiple-k magnetic structures are derived from a superposition of single-k magnetic bases related to the space group symmetry. We apply the scheme to deduce the magnetic structures of α-Mn and CoM3S6 (M=Nb, Ta), in which the large anomalous Hall effect has recently been observed in antiferromagnetic phases, and identify the magnetic structures inducing anomalous Hall effect without net magnetization. 
The physical phenomena originating from emergent multipoles in the ordered phases are also discussed based on the Landau theory.}, - keywords = {/unread,AML,descriptors,feature engineering,Ferromagnetism,invariance,magnetism,ML,rec-by-kipp,spin-dependent}, - file = {/Users/wasmer/Zotero/storage/G9EKXIZ4/Yanagi et al. - 2023 - Generation of modulated magnetic structures based .pdf;/Users/wasmer/Zotero/storage/ILBXKEJP/PhysRevB.107.html} + keywords = {\_tablet,AML,descriptors,feature engineering,Ferromagnetism,invariance,magnetism,ML,rec-by-kipp,spin-dependent}, + file = {/Users/wasmer/Zotero/storage/G9EKXIZ4/Yanagi et al_2023_Generation of modulated magnetic structures based on cluster multipole expansion.pdf;/Users/wasmer/Zotero/storage/ILBXKEJP/PhysRevB.107.html} } @article{yangDecipheringChemicalOrder2017, @@ -15952,7 +17782,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {542}, number = {7639}, pages = {75--79}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1476-4687}, doi = {10.1038/nature21042}, url = {https://www.nature.com/articles/nature21042}, @@ -15973,7 +17803,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {154}, number = {23}, pages = {234704}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0049665}, url = {https://aip.scitation.org/doi/10.1063/5.0049665}, @@ -15992,7 +17822,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {8}, pages = {2261--2269}, - publisher = {{Royal Society of Chemistry}}, + publisher = {Royal Society of Chemistry}, doi = {10.1039/C7SC04934J}, url = {https://pubs.rsc.org/en/content/articlelanding/2018/sc/c7sc04934j}, urldate = {2023-08-24}, @@ -16001,6 +17831,22 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Yao et al_2018_The TensorMol-0.pdf} } +@online{yuanEquivariantNeuralNetwork2024, + title = {Equivariant {{Neural Network Force Fields}} for {{Magnetic Materials}}}, + author = {Yuan, Zilong and Xu, Zhiming and Li, He and Cheng, Xinle and Tao, Honggeng and Tang, Zechen and Zhou, Zhiyuan and Duan, Wenhui and Xu, Yong}, + date = {2024-02-07}, + eprint = {2402.04864}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2402.04864}, + url = {http://arxiv.org/abs/2402.04864}, + urldate = {2024-04-18}, + abstract = {Neural network force fields have significantly advanced ab initio atomistic simulations across diverse fields. However, their application in the realm of magnetic materials is still in its early stage due to challenges posed by the subtle magnetic energy landscape and the difficulty of obtaining training data. Here we introduce a data-efficient neural network architecture to represent density functional theory total energy, atomic forces, and magnetic forces as functions of atomic and magnetic structures. Our approach incorporates the principle of equivariance under the three-dimensional Euclidean group into the neural network model. Through systematic experiments on various systems, including monolayer magnets, curved nanotube magnets, and moir\textbackslash 'e-twisted bilayer magnets of \$\textbackslash text\{CrI\}\_\{3\}\$, we showcase the method's high efficiency and accuracy, as well as exceptional generalization ability. 
The work creates opportunities for exploring magnetic phenomena in large-scale materials systems.}, + pubstate = {preprint}, + keywords = {\_tablet,2D material,AML,CNT,constrained DFT,DeepH,Landau-Lifshits-Gilbert equation,magnetism,magnon dispersion,magnons,ML,MLP,non-collinear,prediction from magnetic configuration,prediction from structure,prediction of energy,prediction of forces,skyrmions,spin dynamics,spin wave,twisted bilayer,VASP}, + file = {/Users/wasmer/Nextcloud/Zotero/Yuan et al_2024_Equivariant Neural Network Force Fields for Magnetic Materials.pdf;/Users/wasmer/Zotero/storage/FB8XK7MY/2402.html} +} + @online{yuCapturingLongrangeInteraction2022, title = {Capturing Long-Range Interaction with Reciprocal Space Neural Network}, author = {Yu, Hongyu and Hong, Liangliang and Chen, Shiyou and Gong, Xingao and Xiang, Hongjun}, @@ -16017,6 +17863,24 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2022_Capturing long-range interaction with reciprocal space neural network.pdf;/Users/wasmer/Zotero/storage/KHNJJ4VF/2211.html} } +@article{yuComplexSpinHamiltonian2022, + title = {Complex Spin {{Hamiltonian}} Represented by an Artificial Neural Network}, + author = {Yu, Hongyu and Xu, Changsong and Li, Xueyang and Lou, Feng and Bellaiche, L. and Hu, Zhenpeng and Gong, Xingao and Xiang, Hongjun}, + date = {2022-05-18}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {105}, + number = {17}, + pages = {174422}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.105.174422}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.105.174422}, + urldate = {2024-04-18}, + abstract = {The effective spin Hamiltonian method is very useful for simulating and understanding the behavior of magnetism. However, it is not easy to construct an appropriate spin Hamiltonian for a magnetic system, especially for complex magnets such as itinerant topological magnets. Here, we put forward a machine learning (ML) approach, applying an artificial neural network (ANN) and a local spin descriptor to construct an effective spin Hamiltonian for any magnetic system. The obtained Hamiltonians include an explicit Heisenberg part and an implicit nonlinear ANN part. Such a method successfully reproduces artificially constructed models and also accurately describes the itinerant magnetism of bulk Fe3GeTe2. Our work paves a new way for investigating complex magnetic phenomena (e.g., skyrmions) using ML techniques.}, + keywords = {alloys,AML,constrained DFT,exchange interaction,Heisenberg model,higher order,higher-order exchange interactions,magnetism,ML,ML-DFT,ML-ESM,non-collinear,prediction of Hamiltonian matrix,spin-constrained DFT,spin-dependent,ternary systems}, + file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2022_Complex spin Hamiltonian represented by an artificial neural network.pdf} +} + @online{yuEfficientEquivariantGraph2023, title = {Efficient and {{Equivariant Graph Networks}} for {{Predicting Quantum Hamiltonian}}}, author = {Yu, Haiyang and Xu, Zhao and Qian, Xiaofeng and Qian, Xiaoning and Ji, Shuiwang}, @@ -16051,6 +17915,22 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Zotero/storage/ZHVDM662/Yu et al. 
- 2018 - ELSI A unified software interface for Kohn–Sham e.pdf;/Users/wasmer/Zotero/storage/RFWBX7DN/S0010465517302941.html} } +@online{yuGeneralTimereversalEquivariant2024, + title = {General Time-Reversal Equivariant Neural Network Potential for Magnetic Materials}, + author = {Yu, Hongyu and Liu, Boyu and Zhong, Yang and Hong, Liangliang and Ji, Junyi and Xu, Changsong and Gong, Xingao and Xiang, Hongjun}, + date = {2024-01-08}, + eprint = {2211.11403}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2211.11403}, + url = {http://arxiv.org/abs/2211.11403}, + urldate = {2024-04-12}, + abstract = {This study introduces time-reversal E(3)-equivariant neural network and SpinGNN++ framework for constructing a comprehensive interatomic potential for magnetic systems, encompassing spin-orbit coupling and noncollinear magnetic moments. SpinGNN++ integrates multitask spin equivariant neural network with explicit spin-lattice terms, including Heisenberg, Dzyaloshinskii-Moriya, Kitaev, single-ion anisotropy, and biquadratic interactions, and employs time-reversal equivariant neural network to learn high-order spin-lattice interactions using time-reversal E(3)-equivariant convolutions. To validate SpinGNN++, a complex magnetic model dataset is introduced as a benchmark and employed to demonstrate its capabilities. SpinGNN++ provides accurate descriptions of the complex spin-lattice coupling in monolayer CrI\$\_3\$ and CrTe\$\_2\$, achieving sub-meV errors. Importantly, it facilitates large-scale parallel spin-lattice dynamics, thereby enabling the exploration of associated properties, including the magnetic ground state and phase transition. Remarkably, SpinGNN++ identifies a new ferrimagnetic state as the ground magnetic state for monolayer CrTe2, thereby enriching its phase diagram and providing deeper insights into the distinct magnetic signals observed in various experiments.}, + pubstate = {preprint}, + keywords = {\_tablet,Allegro,AML,GNN,ML,ML-DFT,ML-ESM,MLP,MPNN,NequIP,non-collinear,prediction of Jij,SOC,spin-dependent,TRS,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2024_General time-reversal equivariant neural network potential for magnetic.pdf;/Users/wasmer/Zotero/storage/XV2X47EC/2211.html} +} + @online{yuQH9QuantumHamiltonian2023, title = {{{QH9}}: {{A Quantum Hamiltonian Prediction Benchmark}} for {{QM9 Molecules}}}, shorttitle = {{{QH9}}}, @@ -16094,20 +17974,20 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-10-14}, abstract = {The development of machine learning interatomic potentials has immensely contributed to the accuracy of simulations of molecules and crystals. However, creating interatomic potentials for magnetic systems that account for both magnetic moments and structural degrees of freedom remains a challenge. This work introduces SpinGNN, a spin-dependent interatomic potential approach that employs the graph neural network (GNN) to describe magnetic systems. SpinGNN consists of two types of edge GNNs: Heisenberg edge GNN (HEGNN) and spin-distance edge GNN (SEGNN). HEGNN is tailored to capture Heisenberg-type spin-lattice interactions, while SEGNN accurately models multi-body and high-order spin-lattice coupling. The effectiveness of SpinGNN is demonstrated by its exceptional precision in fitting a high-order spin Hamiltonian and two complex spin-lattice Hamiltonians with great precision. 
Furthermore, it successfully models the subtle spin-lattice coupling in BiFeO3 and performs large-scale spin-lattice dynamics simulations, predicting its antiferromagnetic ground state, magnetic phase transition, and domain wall energy landscape with high accuracy. Our study broadens the scope of graph neural network potentials to magnetic systems, serving as a foundation for carrying out large-scale spin-lattice dynamic simulations of such systems.}, pubstate = {preprint}, - keywords = {\_tablet,Allegro,AML,collinear,Computer Science - Machine Learning,Condensed Matter - Disordered Systems and Neural Networks,DimeNet,equivariant,GNN,HDNNP,heat transport,Heisenberg model,Jij,LAMMPS,LAMMPS SPIN,magnetism,MD,mHDNNP,ML,MLP,mMTP,MTP,multiferroic,non-collinear,original publication,PES,Physics - Computational Physics,prediction of Jij,prediction of magnetic ground state,skyrmions,spin dynamics,Spin-Allegro,spin-dependent,Spin-Dimenet,spin-lattice coupling,SpinGNN,with-code}, + keywords = {\_tablet,Allegro,AML,collinear,DimeNet,equivariant,GNN,HDNNP,heat transport,Heisenberg model,Jij,LAMMPS,LAMMPS SPIN,magnetism,MD,mHDNNP,ML,MLP,mMTP,MTP,multiferroic,non-collinear,original publication,PES,prediction of Jij,prediction of magnetic ground state,skyrmions,spin dynamics,Spin-Allegro,spin-dependent,Spin-Dimenet,spin-lattice coupling,SpinGNN,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Zotero/storage/B3HN773D/2203.html} } @book{zabloudilElectronScatteringSolid2005, title = {Electron {{Scattering}} in {{Solid Matter}}}, editor = {Zabloudil, Jan and Hammerling, Robert and Weinberger, Peter and Szunyogh, Laszlo}, - editorb = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Merlin, Roberto and Störmer, Horst}, - editorbtype = {redactor}, + editora = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Merlin, Roberto and Störmer, Horst}, + editoratype = {redactor}, date = {2005}, series = {Springer {{Series}} in {{Solid-State Sciences}}}, volume = {147}, - publisher = {{Springer}}, - location = {{Berlin, Heidelberg}}, + publisher = {Springer}, + location = {Berlin, Heidelberg}, doi = {10.1007/b138290}, url = {http://link.springer.com/10.1007/b138290}, urldate = {2023-09-19}, @@ -16124,7 +18004,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, url = {https://twitter.com/ZachLGlick/status/1384860348730298375}, urldate = {2021-05-13}, langid = {english}, - organization = {{@ZachLGlick}}, + organization = {@ZachLGlick}, keywords = {ML}, file = {/Users/wasmer/Zotero/storage/WCHKALVA/1384860348730298375.html} } @@ -16147,7 +18027,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {17}, number = {10}, pages = {6658--6670}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {1549-9618}, doi = {10.1021/acs.jctc.1c00527}, url = {https://doi.org/10.1021/acs.jctc.1c00527}, @@ -16182,7 +18062,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {127}, number = {21}, pages = {215108}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-8979}, doi = {10.1063/5.0002252}, url = {https://aip.scitation.org/doi/10.1063/5.0002252}, @@ -16195,11 +18075,11 @@ Ying-Wai Li (Los Alamos National Laboratory)}, @report{zellerCorrelatedElectronsModels2012, title = {Correlated Electrons: From Models to Materials}, author = {Zeller, Rudolf}, - editorb = {Pavarini, Eva and Anders, Frithjof and Koch, Erik and Jarrell, 
Mark}, - editorbtype = {redactor}, + editor = {Pavarini, Eva and Anders, Frithjof and Koch, Erik and Jarrell, Mark}, + editortype = {redactor}, date = {2012}, number = {PreJuSER-136393}, - institution = {{Forschungszentrum Jülich GmbH Zenralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {https://juser.fz-juelich.de/record/136393/}, urldate = {2022-06-28}, abstract = {Density-functional theory (DFT) is considered the Standard Model of solid-state physics. The state-of-the-art approximations to DFT, the local-density approximation (LDA) or its simple extensions, fail, however, even qualitatively, for strongly-correlated systems. When correlations are strong, electrons become entangled and novel properties emerge. Mott-transitions, Kondo- and heavy-fermion behavior, non-conventional superconductivity and orbital-order are just some examples of this emergent behavior. The realistic description of emergent properties is one of the grand-challenges of modern condensed-matter physics. To understand this physics beyond the Standard Model, nonperturbative many-body techniques are essential. Still, DFT-based methods are needed to devise materials-specific Hamiltonians for strong correlations. Mastering these novel techniques requires a vast background, ranging from DFT to model building and many-body physics. The aim of this school is to introduce advanced graduate students and up to the modern methods for modeling emergent properties of correlated electrons and to explore the relation of electron correlations with quantum entanglement and concepts from quantum information. A school of this size and scope requires support and help from many sources. We are very grateful for all the financial and practical support we have received. The Institute for Advanced Simulation and the German Research School for Simulation Sciences at the Forschungszentrum Jülich provided the funding and were vital for the organization of the school and the production of this book. The DFG Forschergruppe FOR1346 offered travel grants for students and the Institute for Complex Adaptive Matter (ICAM) travel support for international speakers and participants. The nature of a school makes it desirable to have the lecture-notes available already during the lectures. In this way the participants get the chance to work through the lectures thoroughly while they are given. We are therefore extremely grateful to the lecturers that, despite a tight schedule, provided their manuscripts in time for the production of this book. We are confident that the lecture notes collected here will not only serve the participants of the school but will also be useful for other students entering the exciting field of strongly correlated materials. We thank Mrs. H. Lexis of the Forschungszentrum Jülich Verlag and Mr. D. Laufenberg of the Graphische Betriebe for providing their expert support in producing the present volume on a tight schedule and for making even seemingly impossible requests possible. We heartily thank our students and postdocs that helped in proofreading the manuscripts, often on short notice: Carmine Autieri, Fabio Baruffa, Michael Baumgärtel, Monica Bugeanu, Andreas Flesch, Evgeny Gorelov, Amin Kiani Sheikhabadi, Joaquin Miranda, German Ulm, and Guoren Zhang. Finally, our special thanks go to Dipl.-Ing. R. Hölzle for his invaluable advice on all questions concerning the organization of such a school and to Mrs. L. Snyders and Mrs. E. 
George for expertly handling all practical issues. Pavarini, Eva; Koch, Erik; Anders, Frithjof; Jarrell, Mark (Eds. )}, @@ -16215,8 +18095,8 @@ Ying-Wai Li (Los Alamos National Laboratory)}, author = {Zeng, Bei and Chen, Xie and Zhou, Duan-Lu and Wen, Xiao-Gang}, date = {2019}, series = {Quantum {{Science}} and {{Technology}}}, - publisher = {{Springer}}, - location = {{New York, NY}}, + publisher = {Springer}, + location = {New York, NY}, doi = {10.1007/978-1-4939-9084-9}, url = {http://link.springer.com/10.1007/978-1-4939-9084-9}, urldate = {2023-08-24}, @@ -16235,7 +18115,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {154}, number = {22}, pages = {224112}, - publisher = {{American Institute of Physics}}, + publisher = {American Institute of Physics}, issn = {0021-9606}, doi = {10.1063/5.0052961}, url = {https://aip.scitation.org/doi/10.1063/5.0052961}, @@ -16261,6 +18141,23 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Zotero/storage/RXPDN4KU/Zenil et al. - 2023 - The Future of Fundamental Science Led by Generativ.pdf;/Users/wasmer/Zotero/storage/FRVYJZDW/2307.html} } +@online{zeniMatterGenGenerativeModel2023, + title = {{{MatterGen}}: A Generative Model for Inorganic Materials Design}, + shorttitle = {{{MatterGen}}}, + author = {Zeni, Claudio and Pinsler, Robert and Zügner, Daniel and Fowler, Andrew and Horton, Matthew and Fu, Xiang and Shysheya, Sasha and Crabbé, Jonathan and Sun, Lixin and Smith, Jake and Tomioka, Ryota and Xie, Tian}, + date = {2023-12-06}, + eprint = {2312.03687}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2312.03687}, + url = {http://arxiv.org/abs/2312.03687}, + urldate = {2023-12-17}, + abstract = {The design of functional materials with desired properties is essential in driving technological advances in areas like energy storage, catalysis, and carbon capture. Generative models provide a new paradigm for materials design by directly generating entirely novel materials given desired property constraints. Despite recent progress, current generative models have low success rate in proposing stable crystals, or can only satisfy a very limited set of property constraints. Here, we present MatterGen, a model that generates stable, diverse inorganic materials across the periodic table and can further be fine-tuned to steer the generation towards a broad range of property constraints. To enable this, we introduce a new diffusion-based generative process that produces crystalline structures by gradually refining atom types, coordinates, and the periodic lattice. We further introduce adapter modules to enable fine-tuning towards any given property constraints with a labeled dataset. Compared to prior generative models, structures produced by MatterGen are more than twice as likely to be novel and stable, and more than 15 times closer to the local energy minimum. After fine-tuning, MatterGen successfully generates stable, novel materials with desired chemistry, symmetry, as well as mechanical, electronic and magnetic properties. Finally, we demonstrate multi-property materials design capabilities by proposing structures that have both high magnetic density and a chemical composition with low supply-chain risk. 
We believe that the quality of generated materials and the breadth of MatterGen's capabilities represent a major advancement towards creating a universal generative model for materials design.}, + pubstate = {preprint}, + keywords = {/unread,AI4Science,AML,database generation,DFT,diffusion model,generative models,M3GNet,magnetic density,materials discovery,Microsoft Research,ML,n-ary alloys,structure prediction,symmetry,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Zeni et al_2023_MatterGen.pdf;/Users/wasmer/Zotero/storage/9J3S7YXT/2312.html} +} + @article{zepeda-nunezDeepDensityCircumventing2021, title = {Deep {{Density}}: {{Circumventing}} the {{Kohn-Sham}} Equations via Symmetry Preserving Neural Networks}, shorttitle = {Deep {{Density}}}, @@ -16326,7 +18223,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {6}, number = {8}, pages = {584--588}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {1745-2481}, doi = {10.1038/nphys1689}, url = {https://www.nature.com/articles/nphys1689}, @@ -16347,7 +18244,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {20}, number = {4}, pages = {044056}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevApplied.20.044056}, url = {https://link.aps.org/doi/10.1103/PhysRevApplied.20.044056}, urldate = {2023-11-05}, @@ -16369,6 +18266,23 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Zhang_2018_Deep Potential Molecular Dynamics.pdf} } +@online{zhangDPA2UniversalLarge2023, + title = {{{DPA-2}}: {{Towards}} a Universal Large Atomic Model for Molecular and Material Simulation}, + shorttitle = {{{DPA-2}}}, + author = {Zhang, Duo and Liu, Xinzijian and Zhang, Xiangyu and Zhang, Chengqian and Cai, Chun and Bi, Hangrui and Du, Yiming and Qin, Xuejian and Huang, Jiameng and Li, Bowen and Shan, Yifan and Zeng, Jinzhe and Zhang, Yuzhi and Liu, Siyuan and Li, Yifan and Chang, Junhan and Wang, Xinyan and Zhou, Shuo and Liu, Jianchuan and Luo, Xiaoshan and Wang, Zhenyu and Jiang, Wanrun and Wu, Jing and Yang, Yudi and Yang, Jiyuan and Yang, Manyi and Gong, Fu-Qiang and Zhang, Linshuang and Shi, Mengchao and Dai, Fu-Zhi and York, Darrin M. and Liu, Shi and Zhu, Tong and Zhong, Zhicheng and Lv, Jian and Cheng, Jun and Jia, Weile and Chen, Mohan and Ke, Guolin and E, Weinan and Zhang, Linfeng and Wang, Han}, + date = {2023-12-24}, + eprint = {2312.15492}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2312.15492}, + url = {http://arxiv.org/abs/2312.15492}, + urldate = {2024-05-06}, + abstract = {The rapid development of artificial intelligence (AI) is driving significant changes in the field of atomic modeling, simulation, and design. AI-based potential energy models have been successfully used to perform large-scale and long-time simulations with the accuracy of ab initio electronic structure methods. However, the model generation process still hinders applications at scale. We envision that the next stage would be a model-centric ecosystem, in which a large atomic model (LAM), pre-trained with as many atomic datasets as possible and can be efficiently fine-tuned and distilled to downstream tasks, would serve the new infrastructure of the field of molecular modeling. We propose DPA-2, a novel architecture for a LAM, and develop a comprehensive pipeline for model fine-tuning, distillation, and application, associated with automatic workflows. 
We show that DPA-2 can accurately represent a diverse range of chemical systems and materials, enabling high-quality simulations and predictions with significantly reduced efforts compared to traditional methods. Our approach paves the way for a universal large atomic model that can be widely applied in molecular and material simulation research, opening new opportunities for scientific discoveries and industrial applications.}, + pubstate = {preprint}, + keywords = {ablation study,Allegro,AML,attention,benchmarking,dataset,DeePMD-kit,Equiformer,equivariant,fine-tuning,foundation models,GemNet,generalization,ML,MLP,multi-task learning,NequIP,periodic table,pretrained models,property prediction,transfer learning,transformer,universal potential,with-code,with-data,zero-shot generalization}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2023_DPA-2.pdf;/Users/wasmer/Zotero/storage/LAGXG4QQ/2312.html} +} + @article{zhangHighthroughputDesignMagnetic2021, title = {High-Throughput Design of Magnetic Materials}, author = {Zhang, Hongbin}, @@ -16378,7 +18292,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {3}, number = {3}, pages = {033001}, - publisher = {{IOP Publishing}}, + publisher = {IOP Publishing}, issn = {2516-1075}, doi = {10.1088/2516-1075/abbb25}, url = {https://dx.doi.org/10.1088/2516-1075/abbb25}, @@ -16398,7 +18312,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {100}, number = {5}, pages = {054205}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.100.054205}, url = {https://link.aps.org/doi/10.1103/PhysRevB.100.054205}, urldate = {2023-09-19}, @@ -16416,7 +18330,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {96}, number = {24}, pages = {245119}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.96.245119}, url = {https://link.aps.org/doi/10.1103/PhysRevB.96.245119}, urldate = {2021-05-21}, @@ -16434,7 +18348,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {120}, number = {6}, pages = {066401}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.120.066401}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.120.066401}, urldate = {2021-05-21}, @@ -16456,7 +18370,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-02-23}, abstract = {Determining thermal and physical quantities across a broad temperature domain, especially up to the ultra-high temperature region, is a formidable theoretical and experimental challenge. At the same time it is essential for understanding the performance of ultra-high temperature ceramic (UHTC) materials. Here we present the development of a machine-learning force field for ZrB2, one of the primary members of the UHTC family with a complex bonding structure. The force field exhibits chemistry accuracy for both energies and forces and can reproduce structural, elastic and phonon properties, including thermal expansion and thermal transport. A thorough comparison with available empirical potentials shows that our force field outperforms the competitors. Most importantly, its effectiveness is extended from room temperature to the ultra-high temperature region (up to \textasciitilde{} 2,500 K), where measurements are very difficult, costly and some time impossible. 
Our work demonstrates that machine-learning force fields can be used for simulations of materials in a harsh environment, where no experimental tools are available, but crucial for a number of engineering applications, such as in aerospace, aviation and nuclear.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,Condensed Matter - Materials Science,Quantum Physics}, + keywords = {/unread,\_tablet,Condensed Matter - Materials Science,Quantum Physics}, file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2019_Pushing the limits of atomistic simulations towards ultra-high temperature.pdf;/Users/wasmer/Zotero/storage/IIZUJ8Y9/1911.html} } @@ -16469,7 +18383,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {118}, number = {21}, pages = {216401}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevLett.118.216401}, url = {https://link.aps.org/doi/10.1103/PhysRevLett.118.216401}, urldate = {2021-05-21}, @@ -16503,7 +18417,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {4}, number = {1}, pages = {1--8}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-018-0081-z}, url = {https://www.nature.com/articles/s41524-018-0081-z}, @@ -16533,6 +18447,18 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Zhang_Zhang_2013_Topological insulators from the perspective of first-principles calculations.pdf;/Users/wasmer/Zotero/storage/ZU36AAB4/pssr.html} } +@unpublished{zhaoDeepLearningFunctionals2024, + type = {Poster}, + title = {Deep {{Learning Functionals}} Based on the {{Adiabatic Connection}}}, + author = {Zhao, Heng and Vuckovic, Stefan}, + date = {2024-03}, + url = {https://meetings.aps.org/Meeting/MAR24/Session/J00.376}, + urldate = {2024-01-01}, + eventtitle = {{{APS March Meeting}}}, + keywords = {/unread,AML,ML,ML-DFA,ML-DFT,ML-ESM,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/AQGN5964/J00.html} +} + @article{zhaoQuantumOscillationsIrondoped2019, title = {Quantum Oscillations in Iron-Doped Single Crystals of the Topological Insulator \$\textbackslash mathrm\{\vphantom\}{{S}}\vphantom\{\}\{\textbackslash mathrm\{b\}\}\_\{2\}\textbackslash mathrm\{\vphantom\}{{T}}\vphantom\{\}\{\textbackslash mathrm\{e\}\}\_\{3\}\$}, author = {Zhao, Weiyao and Cortie, David and Chen, Lei and Li, Zhi and Yue, Zengji and Wang, Xiaolin}, @@ -16542,7 +18468,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {99}, number = {16}, pages = {165133}, - publisher = {{American Physical Society}}, + publisher = {American Physical Society}, doi = {10.1103/PhysRevB.99.165133}, url = {https://link.aps.org/doi/10.1103/PhysRevB.99.165133}, urldate = {2022-04-28}, @@ -16550,6 +18476,26 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Zhao et al_2019_Quantum oscillations in iron-doped single crystals of the topological insulator.pdf;/Users/wasmer/Zotero/storage/GGTED6FM/Zhao et al. - 2019 - Quantum oscillations in iron-doped single crystals.pdf;/Users/wasmer/Zotero/storage/8D5JL2DQ/PhysRevB.99.html} } +@article{zhengHopfionRingsCubic2023, + title = {Hopfion Rings in a Cubic Chiral Magnet}, + author = {Zheng, Fengshan and Kiselev, Nikolai S. and Rybakov, Filipp N. 
and Yang, Luyan and Shi, Wen and Blügel, Stefan and Dunin-Borkowski, Rafal E.}, + date = {2023-11}, + journaltitle = {Nature}, + volume = {623}, + number = {7988}, + pages = {718--723}, + publisher = {Nature Publishing Group}, + issn = {1476-4687}, + doi = {10.1038/s41586-023-06658-5}, + url = {https://www.nature.com/articles/s41586-023-06658-5}, + urldate = {2023-11-24}, + abstract = {Magnetic skyrmions and hopfions are topological solitons1—well-localized field configurations that have gained considerable attention over the past decade owing to their unique particle-like properties, which make them promising objects for spintronic applications. Skyrmions2,3 are two-dimensional solitons resembling vortex-like string structures that can penetrate an entire sample. Hopfions4–9 are three-dimensional solitons confined within a magnetic sample volume and can be considered as closed twisted skyrmion strings that take the shape of a ring in the simplest case. Despite extensive research on magnetic skyrmions, the direct observation of magnetic hopfions is challenging10 and has only been reported in a synthetic material11. Here we present direct observations of hopfions in crystals. In our experiment, we use transmission electron microscopy to observe hopfions forming coupled states with skyrmion strings in B20-type FeGe plates. We provide a protocol for nucleating such hopfion rings, which we verify using Lorentz imaging and electron holography. Our results are highly reproducible and in full agreement with micromagnetic simulations. We provide a unified skyrmion–hopfion homotopy classification and offer insight into the diversity of topological solitons in three-dimensional chiral magnets.}, + issue = {7988}, + langid = {english}, + keywords = {DFT,FZJ,hopfion,magnetic structure,magnetic supperlattice,magnetism,PGI,PGI-1/IAS-1,skyrmions,spin texture,topological}, + file = {/Users/wasmer/Nextcloud/Zotero/Zheng et al_2023_Hopfion rings in a cubic chiral magnet.pdf} +} + @online{zhongAcceleratingElectronicstructureCalculation2023, title = {Accelerating the Electronic-Structure Calculation of Magnetic Systems by Equivariant Neural Networks}, author = {Zhong, Yang and Zhang, Binhua and Yu, Hongyu and Gong, Xingao and Xiang, Hongjun}, @@ -16562,10 +18508,26 @@ Ying-Wai Li (Los Alamos National Laboratory)}, urldate = {2023-06-12}, abstract = {Complex spin-spin interactions in magnets can often lead to magnetic superlattices with complex local magnetic arrangements, and many of the magnetic superlattices have been found to possess non-trivial topological electronic properties. Due to the huge size and complex magnetic moment arrangement of the magnetic superlattices, it is a great challenge to perform a direct DFT calculation on them. In this work, an equivariant deep learning framework is designed to accelerate the electronic calculation of magnetic systems by exploiting both the equivariant constraints of the magnetic Hamiltonian matrix and the physical rules of spin-spin interactions. This framework can bypass the costly self-consistent iterations and build a direct mapping from a magnetic configuration to the ab initio Hamiltonian matrix. After training on the magnets with random magnetic configurations, our model achieved high accuracy on the test structures outside the training set, such as spin spiral and non-collinear antiferromagnetic configurations. 
The trained model is also used to predict the energy bands of a skyrmion configuration of NiBrI containing thousands of atoms, showing the high efficiency of our model on large magnetic superlattices.}, pubstate = {preprint}, - keywords = {\_tablet,2D material,AFM,AML,DFT,DFT speedup,DFT speedup with ML,Dzyaloshinskii–Moriya interaction,E(3),equivariant,GNN,Hall effect,Heisenberg model,iron,Jij,magnetic Hamiltonian,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,non-collinear,OpenMX,prediction from magnetic configuration,prediction of Hamiltonian matrix,prediction of Jij,skyrmions,SO(3),SOC,spin spiral,spin-dependent,SU(2),ternary systems,TRS}, + keywords = {\_tablet,2D material,AFM,AML,DFT,DFT speedup,DFT speedup with ML,Dzyaloshinskii–Moriya interaction,E(3),equivariant,GNN,Hall effect,Heisenberg model,higher-order exchange interactions,iron,Jij,magnetic Hamiltonian,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,non-collinear,OpenMX,prediction from magnetic configuration,prediction of Hamiltonian matrix,prediction of Jij,skyrmions,SO(3),SOC,spin spiral,spin-dependent,SU(2),ternary systems,TRS}, file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_Accelerating the electronic-structure calculation of magnetic systems by.pdf;/Users/wasmer/Zotero/storage/RJIQYZHY/2306.html} } +@online{zhongEdgebasedTensorPrediction2022, + title = {Edge-Based {{Tensor}} Prediction via Graph Neural Networks}, + author = {Zhong, Yang and Yu, Hongyu and Gong, Xingao and Xiang, Hongjun}, + date = {2022-01-15}, + eprint = {2201.05770}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2201.05770}, + url = {http://arxiv.org/abs/2201.05770}, + urldate = {2024-04-18}, + abstract = {Message-passing neural networks (MPNN) have shown extremely high efficiency and accuracy in predicting the physical properties of molecules and crystals, and are expected to become the next-generation material simulation tool after the density functional theory (DFT). However, there is currently a lack of a general MPNN framework for directly predicting the tensor properties of the crystals. In this work, a general framework for the prediction of tensor properties was proposed: the tensor property of a crystal can be decomposed into the average of the tensor contributions of all the atoms in the crystal, and the tensor contribution of each atom can be expanded as the sum of the tensor projections in the directions of the edges connecting the atoms. On this basis, the edge-based expansions of force vectors, Born effective charges (BECs), dielectric (DL) and piezoelectric (PZ) tensors were proposed. These expansions are rotationally equivariant, while the coefficients in these tensor expansions are rotationally invariant scalars which are similar to physical quantities such as formation energy and band gap. The advantage of this tensor prediction framework is that it does not require the network itself to be equivariant. Therefore, in this work, we directly designed the edge-based tensor prediction graph neural network (ETGNN) model on the basis of the invariant graph neural network to predict tensors. The validity and high precision of this tensor prediction framework were shown by the tests of ETGNN on the extended systems, random perturbed structures and JARVIS-DFT datasets. 
This tensor prediction framework is general for nearly all the GNNs and can achieve higher accuracy with more advanced GNNs in the future.}, + pubstate = {preprint}, + keywords = {AML,equivariant,GNN,HEA,invariance,JARVIS-DFT,materials,ML,MPNN,prediction of charge,prediction of forces,tensorial target}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2022_Edge-based Tensor prediction via graph neural networks.pdf;/Users/wasmer/Zotero/storage/RQ2TNRCK/2201.html} +} + @article{zhongGeneralTensorPrediction2023, title = {A {{General Tensor Prediction Framework Based}} on {{Graph Neural Networks}}}, author = {Zhong, Yang and Yu, Hongyu and Gong, Xingao and Xiang, Hongjun}, @@ -16573,13 +18535,13 @@ Ying-Wai Li (Los Alamos National Laboratory)}, journaltitle = {The Journal of Physical Chemistry Letters}, shortjournal = {J. Phys. Chem. Lett.}, pages = {6339--6348}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, doi = {10.1021/acs.jpclett.3c01200}, url = {https://doi.org/10.1021/acs.jpclett.3c01200}, urldate = {2023-07-13}, abstract = {Graph neural networks (GNNs) have been shown to be extremely flexible and accurate in predicting the physical properties of molecules and crystals. However, traditional invariant GNNs are not compatible with directional properties, which currently limits their usage to the prediction of only invariant scalar properties. To address this issue, here we propose a general framework, i.e., an edge-based tensor prediction graph neural network, in which a tensor is expressed as the linear combination of the local spatial components projected on the edge directions of clusters with varying sizes. This tensor decomposition is rotationally equivariant and exactly satisfies the symmetry of the local structures. The accuracy and universality of our new framework are demonstrated by the successful prediction of various tensor properties from first to third order. 
The framework proposed in this work will enable GNNs to step into the broad field of prediction of directional properties.}, - keywords = {AML,benchmarking,GNN,GPR,magnetic anisotropy,magnetism,ML,MLP,MPNN,prediction of magnetic anisotropy,SA-GPR,SOAP,tensorial target}, - file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_A General Tensor Prediction Framework Based on Graph Neural Networks.pdf;/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_A General Tensor Prediction Framework Based on Graph Neural Networks2.pdf;/Users/wasmer/Zotero/storage/B7FX9ZP8/acs.jpclett.html} + keywords = {\_tablet,AML,benchmarking,DeePMD-kit,DimeNet++,equivariant,FieldSchNet,GNN,GPR,JARVIS-DFT,magnetic anisotropy,magnetism,ML,MLP,MPNN,PCA,prediction of magnetic anisotropy,SA-GPR,SchNetPack,SOAP,tensorial target}, + file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_A General Tensor Prediction Framework Based on Graph Neural Networks.pdf;/Users/wasmer/Zotero/storage/B7FX9ZP8/acs.jpclett.html} } @article{zhongTransferableEquivariantGraph2023, @@ -16591,7 +18553,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {9}, number = {1}, pages = {1--13}, - publisher = {{Nature Publishing Group}}, + publisher = {Nature Publishing Group}, issn = {2057-3960}, doi = {10.1038/s41524-023-01130-4}, url = {https://www.nature.com/articles/s41524-023-01130-4}, @@ -16599,10 +18561,42 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {This work presents an E(3) equivariant graph neural network called HamGNN, which can fit the electronic Hamiltonian matrix of molecules and solids by a complete data-driven method. Unlike invariant models that achieve equivariance approximately through data augmentation, HamGNN employs E(3) equivariant convolutions to construct the Hamiltonian matrix, ensuring strict adherence to all equivariant constraints inherent in the physical system. In contrast to previous models with limited transferability, HamGNN demonstrates exceptional accuracy on various datasets, including QM9 molecular datasets, carbon allotropes, silicon allotropes, SiO2 isomers, and BixSey compounds. The trained HamGNN models exhibit accurate predictions of electronic structures for large crystals beyond the training set, including the Moiré twisted bilayer MoS2 and silicon supercells with dislocation defects, showcasing remarkable transferability and generalization capabilities. 
The HamGNN model, trained on small systems, can serve as an efficient alternative to density functional theory (DFT) for accurately computing the electronic structures of large systems.}, issue = {1}, langid = {english}, - keywords = {\_tablet,Electronic properties and materials,Electronic structure}, + keywords = {\_tablet,AML,bismuth selenide,DFT,e3nn,GNN,HamGNN,library,line defects,ML,ML-DFT,ML-ESM,MoS2,MPNN,OpenMX,prediction of Hamiltonian matrix,PyTorch,SOC,spin-dependent,SU(2),TB,tight binding,TMDC,TRS,twisted bilayer,VASP,with-code,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_Transferable equivariant graph neural networks for the Hamiltonians of.pdf} } +@online{zhongTransferableEquivariantParameterization2023, + title = {Transferable {{E}}(3) Equivariant Parameterization for {{Hamiltonian}} of Molecules and Solids}, + author = {Zhong, Yang and Yu, Hongyu and Su, Mao and Gong, Xingao and Xiang, Hongjun}, + date = {2023-02-04}, + eprint = {2210.16190}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.1038/s41524-023-01130-4}, + url = {http://arxiv.org/abs/2210.16190}, + urldate = {2024-04-18}, + abstract = {Using the message-passing mechanism in machine learning (ML) instead of self-consistent iterations to directly build the mapping from structures to electronic Hamiltonian matrices will greatly improve the efficiency of density functional theory (DFT) calculations. In this work, we proposed a general analytic Hamiltonian representation in an E(3) equivariant framework, which can fit the ab initio Hamiltonian of molecules and solids by a complete data-driven method and are equivariant under rotation, space inversion, and time reversal operations. Our model reached state-of-the-art precision in the benchmark test and accurately predicted the electronic Hamiltonian matrices and related properties of various periodic and aperiodic systems, showing high transferability and generalization ability. This framework provides a general transferable model that can be used to accelerate the electronic structure calculations on different large systems with the same network weights trained on small structures.}, + pubstate = {preprint}, + keywords = {AML,bismuth selenide,DFT,e3nn,GNN,HamGNN,library,ML,ML-DFT,ML-ESM,MoS2,MPNN,OpenMX,prediction of Hamiltonian matrix,PyTorch,todo-tagging,twisted bilayer,VASP,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_Transferable E(3) equivariant parameterization for Hamiltonian of molecules and2.pdf;/Users/wasmer/Zotero/storage/SCZR9M92/2210.html} +} + +@online{zhongUniversalMachineLearning2024, + title = {Universal {{Machine Learning Kohn-Sham Hamiltonian}} for {{Materials}}}, + author = {Zhong, Yang and Yu, Hongyu and Yang, Jihui and Guo, Xingyu and Xiang, Hongjun and Gong, Xingao}, + date = {2024-04-15}, + eprint = {2402.09251}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2402.09251}, + url = {http://arxiv.org/abs/2402.09251}, + urldate = {2024-04-18}, + abstract = {While density functional theory (DFT) serves as a prevalent computational approach in electronic structure calculations, its computational demands and scalability limitations persist. Recently, leveraging neural networks to parameterize the Kohn-Sham DFT Hamiltonian has emerged as a promising avenue for accelerating electronic structure computations. 
Despite advancements, challenges such as the necessity for computing extensive DFT training data to explore each new system and the complexity of establishing accurate ML models for multi-elemental materials still exist. Addressing these hurdles, this study introduces a universal electronic Hamiltonian model trained on Hamiltonian matrices obtained from first-principles DFT calculations of nearly all crystal structures on the Materials Project. We demonstrate its generality in predicting electronic structures across the whole periodic table, including complex multi-elemental systems, solid-state electrolytes, Moir\textbackslash 'e twisted bilayer heterostructure, and metal-organic frameworks (MOFs). Moreover, we utilize the universal model to conduct high-throughput calculations of electronic structures for crystals in GeNOME datasets, identifying 3,940 crystals with direct band gaps and 5,109 crystals with flat bands. By offering a reliable efficient framework for computing electronic properties, this universal Hamiltonian model lays the groundwork for advancements in diverse fields, such as easily providing a huge data set of electronic structures and also making the materials design across the whole periodic table possible.}, + pubstate = {preprint}, + keywords = {\_tablet,ACE,AML,chemical species scaling problem,DeepH,DFT,foundation models,GNoME,HamGNN,HEA,heterostructures,materials,materials project,ML,ML-DFT,ML-ESM,OpenMX,PhiSNet,prediction from energy,prediction from structure,prediction of Hamiltonian matrix,SchNOrb,twisted bilayer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2024_Universal Machine Learning Kohn-Sham Hamiltonian for Materials.pdf;/Users/wasmer/Zotero/storage/QIP9JFLF/2402.html} +} + @online{zhouComprehensiveSurveyPretrained2023, title = {A {{Comprehensive Survey}} on {{Pretrained Foundation Models}}: {{A History}} from {{BERT}} to {{ChatGPT}}}, shorttitle = {A {{Comprehensive Survey}} on {{Pretrained Foundation Models}}}, @@ -16620,6 +18614,22 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2023_A Comprehensive Survey on Pretrained Foundation Models.pdf;/Users/wasmer/Zotero/storage/CWZ9H6CB/2302.html} } +@online{zhouContinuityRotationRepresentations2020, + title = {On the {{Continuity}} of {{Rotation Representations}} in {{Neural Networks}}}, + author = {Zhou, Yi and Barnes, Connelly and Lu, Jingwan and Yang, Jimei and Li, Hao}, + date = {2020-06-08}, + eprint = {1812.07035}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.1812.07035}, + url = {http://arxiv.org/abs/1812.07035}, + urldate = {2024-04-05}, + abstract = {In neural networks, it is often desirable to work with various representations of the same space. For example, 3D rotations can be represented with quaternions or Euler angles. In this paper, we advance a definition of a continuous representation, which can be helpful for training deep neural networks. We relate this to topological concepts such as homeomorphism and embedding. We then investigate what are continuous and discontinuous representations for 2D, 3D, and n-dimensional rotations. We demonstrate that for 3D rotations, all representations are discontinuous in the real Euclidean spaces of four or fewer dimensions. Thus, widely used representations such as quaternions and Euler angles are discontinuous and difficult for neural networks to learn. 
We show that the 3D rotations have continuous representations in 5D and 6D, which are more suitable for learning. We also present continuous representations for the general case of the n-dimensional rotation group SO(n). While our main focus is on rotations, we also show that our constructions apply to other groups such as the orthogonal group and similarity transforms. We finally present empirical results, which show that our continuous rotation representations outperform discontinuous ones for several practical problems in graphics and vision, including a simple autoencoder sanity test, a rotation estimator for 3D point clouds, and an inverse kinematics solver for 3D human poses.}, + pubstate = {preprint}, + keywords = {/unread,general ML,geometric deep learning,group theory,ML,rec-by-katsumoto,rotational symmetry,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2020_On the Continuity of Rotation Representations in Neural Networks.pdf;/Users/wasmer/Zotero/storage/IXG2BA7I/1812.html} +} + @article{zhouGraphNeuralNetworks2020, title = {Graph Neural Networks: {{A}} Review of Methods and Applications}, shorttitle = {Graph Neural Networks}, @@ -16634,7 +18644,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, url = {https://www.sciencedirect.com/science/article/pii/S2666651021000012}, urldate = {2023-11-14}, abstract = {Lots of learning tasks require dealing with graph data which contains rich relation information among elements. Modeling physics systems, learning molecular fingerprints, predicting protein interface, and classifying diseases demand a model to learn from graph inputs. In other domains such as learning from non-structural data like texts and images, reasoning on extracted structures (like the dependency trees of sentences and the scene graphs of images) is an important research topic which also needs graph reasoning models. Graph neural networks (GNNs) are neural models that capture the dependence of graphs via message passing between the nodes of graphs. In recent years, variants of GNNs such as graph convolutional network (GCN), graph attention network (GAT), graph recurrent network (GRN) have demonstrated ground-breaking performances on many deep learning tasks. In this survey, we propose a general design pipeline for GNN models and discuss the variants of each component, systematically categorize the applications, and propose four open problems for future research.}, - keywords = {/unread,General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, + keywords = {General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2020_Graph neural networks.pdf;/Users/wasmer/Zotero/storage/YML8J4GK/S2666651021000012.html} } @@ -16646,7 +18656,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {115}, number = {28}, pages = {E6411-E6417}, - publisher = {{Proceedings of the National Academy of Sciences}}, + publisher = {Proceedings of the National Academy of Sciences}, doi = {10.1073/pnas.1801181115}, url = {https://www.pnas.org/doi/10.1073/pnas.1801181115}, urldate = {2023-07-12}, @@ -16655,14 +18665,30 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Zotero/storage/HN96KIR2/Zhou et al. 
- 2018 - Learning atoms for materials discovery.pdf} } +@online{zhouMultilevelMethodManyElectron2023, + title = {A {{Multilevel Method}} for {{Many-Electron Schr}}\textbackslash "\{o\}dinger {{Equations Based}} on the {{Atomic Cluster Expansion}}}, + author = {Zhou, Dexuan and Chen, Huajie and Ho, Cheuk Hin and Ortner, Christoph}, + date = {2023-05-04}, + eprint = {2304.04260}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2304.04260}, + url = {http://arxiv.org/abs/2304.04260}, + urldate = {2023-12-18}, + abstract = {The atomic cluster expansion (ACE) (Drautz, 2019) yields a highly efficient and interpretable parameterisation of symmetric polynomials that has achieved great success in modelling properties of many-particle systems. In the present work we extend the practical applicability of the ACE framework to the computation of many-electron wave functions. To that end, we develop a customized variational Monte-Carlo algorithm that exploits the sparsity and hierarchical properties of ACE wave functions. We demonstrate the feasibility on a range of proof-of-concept applications to one-dimensional systems.}, + pubstate = {preprint}, + keywords = {ACE,AML,descriptors,ML,ML-ESM,ML-WFT,VMC}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2023_A Multilevel Method for Many-Electron Schr- o dinger Equations Based on the.pdf;/Users/wasmer/Zotero/storage/9V3IPFCI/2304.html} +} + @book{zhuBogoliubovdeGennesMethod2016, title = {Bogoliubov-de {{Gennes Method}} and {{Its Applications}}}, author = {Zhu, Jian-Xin}, date = {2016}, series = {Lecture {{Notes}} in {{Physics}}}, volume = {924}, - publisher = {{Springer International Publishing}}, - location = {{Cham}}, + publisher = {Springer International Publishing}, + location = {Cham}, doi = {10.1007/978-3-319-31314-6}, url = {http://link.springer.com/10.1007/978-3-319-31314-6}, urldate = {2023-05-03}, @@ -16677,7 +18703,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, author = {Zimmermann, Bernd}, date = {2014}, number = {FZJ-2014-05437}, - institution = {{Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}}, + institution = {Forschungszentrum Jülich GmbH Zentralbibliothek, Verlag}, url = {http://hdl.handle.net/2128/8063}, urldate = {2022-08-12}, abstract = {This thesis attempts to shed light on various spin-orbit driven transport phenomena in materials, as a crucial for the further development of the field of spintronics. In particular, we address the skew-scattering mechanism in dilute alloys, which gives rise to the anomalous and spin Hall effect, as well as spin-relaxation processes. We create the tools to access these quantities from \$\textbackslash textit\{ab initio\}\$ calculations in the framework of the full-potential all-electron Korringa-Kohn-Rostoker Green-function method, by (a) developing and implementing a new tetrahedron method for the calculation of complicated, multi-sheeted Fermi surfaces even of complex transition-metal compounds, and (b) developing an efficiently parallelized and thus highly scalable computer program (up to thousands of processors) for the precise calculation of scattering properties. In a first application of the new tetrahedron method, we calculate the Elliott-Yafet spin-mixing parameter on the Fermi surfaces of 5\$\textbackslash textit\{d\}\$ and 6\$\textbackslash textit\{sp\}\$ metals, and discover a yet unexplored dependence on the electron's spin-polarization direction.
As we show, this anisotropy can reach gigantic values in uniaxial hcp crystals due to the emergence of large spin-flip hot-areas or hot-loops on the Fermi surface, supported by the low symmetry of the hcp crystal. A simple model is able to reveal an interesting interplay between the orbital character of the states at special points, lines or areas in the Brillouin zone and the matrix-elements of the spin-flip part of the spin-orbit coupling operator. We further calculate the skew-scattering contribution to the anomalous Hall effect (AHE) in dilute alloys based on a ferromagnetic host for the first time. A systematic study of 3\$\textbackslash textit\{d\}\$ impurities in bcc Fe, as well as the non-magnetic hosts Pd, Pt and Au, allows us to identify trends across the periodic table. In all our calculations, we also observe a strong correlation between the spin Hall effect and anomalous Hall effect in these materials, which is of interest for the creation and detection of strongly spin-polarized currents. A Fermi-surface analysis of the contributions to the AHE reveals a non-trivial, peaked behavior at small hot-spots around spin-orbit lifted degeneracies. We then proceed to the more complicated \$\textbackslash textit\{L\}\$1\$\_\{0\}\$-ordered alloy FePt and address different kinds of disorder. We showcase the power of our method by treating the very complicated compounds Fe\$\_\{x\}\$Mn\$\_\{1-x\}\$Si and MnSi\$\_\{1-x\}\$Ge\$\_\{x\}\$, based on the non-Fermi liquid manganese silicide (MnSi). Finally, we also calculate the pure spin Hall effect for 4\$\textbackslash textit\{d\}\$/5\$\textbackslash textit\{sp\}\$ and 5\$\textbackslash textit\{d\}\$/6\$\textbackslash textit\{sp\}\$ impurities in fcc Ir and hcp Re hosts. For the latter, we discover a strong dependence on the electron's spin-polarization direction.}, @@ -16687,6 +18713,17 @@ Ying-Wai Li (Los Alamos National Laboratory)}, file = {/Users/wasmer/Nextcloud/Zotero/Zimmermann_2014_Ab initio description of transverse transport due to impurity scattering in.pdf;/Users/wasmer/Zotero/storage/QL7I6VYG/171881.html} } +@online{zitnickOpenCatalystIntro2024, + type = {YouTube playlist}, + title = {Open {{Catalyst Intro Series}} - {{YouTube}}}, + author = {Zitnick, C. Lawrence}, + date = {2024-11-23}, + url = {https://www.youtube.com/playlist?list=PLU7acyFOb6DXgCTAi2TwKXaFD_i3C6hSL}, + urldate = {2024-05-07}, + keywords = {/unread,AML,language models,ML}, + file = {/Users/wasmer/Zotero/storage/MAFGMB4D/playlist.html} +} + @article{zungerUnderstandingDopingQuantum2021, title = {Understanding {{Doping}} of {{Quantum Materials}}}, author = {Zunger, Alex and Malyi, Oleksandr I.}, @@ -16696,7 +18733,7 @@ Ying-Wai Li (Los Alamos National Laboratory)}, volume = {121}, number = {5}, pages = {3031--3060}, - publisher = {{American Chemical Society}}, + publisher = {American Chemical Society}, issn = {0009-2665}, doi = {10.1021/acs.chemrev.0c00608}, url = {https://doi.org/10.1021/acs.chemrev.0c00608}, @@ -16704,3 +18741,22 @@ Ying-Wai Li (Los Alamos National Laboratory)}, abstract = {Doping mobile carriers into ordinary semiconductors such as Si, GaAs, and ZnO was the enabling step in the electronic and optoelectronic revolutions.
The recent emergence of a class of “quantum materials”, where uniquely quantum interactions between the components produce specific behaviors such as topological insulation, unusual magnetism, superconductivity, spin–orbit-induced and magnetically induced spin splitting, polaron formation, and transparency of electrical conductors, pointed attention to a range of doping-related phenomena associated with chemical classes that differ from the traditional semiconductors. These include wide-gap oxides, compounds containing open-shell d electrons, and compounds made of heavy elements yet having significant band gaps. The atomistic electronic structure theory of doping that has been developed over the past two decades in the subfield of semiconductor physics has recently been extended and applied to quantum materials. The present review focuses on explaining the main concepts needed for a basic understanding of the doping phenomenology and indeed peculiarities in quantum materials from the perspective of condensed matter theory, with the hope of forging bridges to the chemists that have enabled the synthesis of some of the most interesting compounds in this field.}, file = {/Users/wasmer/Nextcloud/Zotero/Zunger_Malyi_2021_Understanding Doping of Quantum Materials.pdf} } + +@article{zuoPerformanceCostAssessment2020, + title = {Performance and {{Cost Assessment}} of {{Machine Learning Interatomic Potentials}}}, + author = {Zuo, Yunxing and Chen, Chi and Li, Xiangguo and Deng, Zhi and Chen, Yiming and Behler, Jörg and Csányi, Gábor and Shapeev, Alexander V. and Thompson, Aidan P. and Wood, Mitchell A. and Ong, Shyue Ping}, + date = {2020-01-30}, + journaltitle = {The Journal of Physical Chemistry A}, + shortjournal = {J. Phys. Chem. A}, + volume = {124}, + number = {4}, + pages = {731--745}, + publisher = {American Chemical Society}, + issn = {1089-5639}, + doi = {10.1021/acs.jpca.9b08723}, + url = {https://doi.org/10.1021/acs.jpca.9b08723}, + urldate = {2023-12-10}, + abstract = {Machine learning of the quantitative relationship between local environment descriptors and the potential energy surface of a system of atoms has emerged as a new frontier in the development of interatomic potentials (IAPs). Here, we present a comprehensive evaluation of machine learning IAPs (ML-IAPs) based on four local environment descriptors—atom-centered symmetry functions (ACSF), smooth overlap of atomic positions (SOAP), the spectral neighbor analysis potential (SNAP) bispectrum components, and moment tensors—using a diverse data set generated using high-throughput density functional theory (DFT) calculations. The data set comprising bcc (Li, Mo) and fcc (Cu, Ni) metals and diamond group IV semiconductors (Si, Ge) is chosen to span a range of crystal structures and bonding. All descriptors studied show excellent performance in predicting energies and forces far surpassing that of classical IAPs, as well as predicting properties such as elastic constants and phonon dispersion curves. We observe a general trade-off between accuracy and the degrees of freedom of each model and, consequently, computational cost. We will discuss these trade-offs in the context of model selection for molecular dynamics and other applications.}, + keywords = {ACE,AML,benchmarking,descriptor comparison,descriptors,GAP,high impact,ML,MLP,MLP comparison,model evaluation,MTP,Pareto front,SNAP,SOAP}, + file = {/Users/wasmer/Nextcloud/Zotero/Zuo et al_2020_Performance and Cost Assessment of Machine Learning Interatomic Potentials2.pdf} +}