diff --git a/bib/bibliography.bib b/bib/bibliography.bib index a5910888a4f66e04cdbae6f6e84c236f5a0cb3fe..03389e0f6ea66497e741e09ee7c137dd48933ee1 100644 --- a/bib/bibliography.bib +++ b/bib/bibliography.bib @@ -260,6 +260,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Akiba et al_2019_Optuna.pdf;/Users/wasmer/Zotero/storage/5LHKTHSE/1907.html} } +@online{alamparaMatTextLanguageModels2024, + title = {{{MatText}}: {{Do Language Models Need More}} than {{Text}} \& {{Scale}} for {{Materials Modeling}}?}, + shorttitle = {{{MatText}}}, + author = {Alampara, Nawaf and Miret, Santiago and Jablonka, Kevin Maik}, + date = {2024-06-28}, + eprint = {2406.17295}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.17295}, + url = {http://arxiv.org/abs/2406.17295}, + urldate = {2024-10-17}, + abstract = {Effectively representing materials as text has the potential to leverage the vast advancements of large language models (LLMs) for discovering new materials. While LLMs have shown remarkable success in various domains, their application to materials science remains underexplored. A fundamental challenge is the lack of understanding of how to best utilize text-based representations for materials modeling. This challenge is further compounded by the absence of a comprehensive benchmark to rigorously evaluate the capabilities and limitations of these text representations in capturing the complexity of material systems. To address this gap, we propose MatText, a suite of benchmarking tools and datasets designed to systematically evaluate the performance of language models in modeling materials. MatText encompasses nine distinct text-based representations for material systems, including several novel representations. Each representation incorporates unique inductive biases that capture relevant information and integrate prior physical knowledge about materials. 
Additionally, MatText provides essential tools for training and benchmarking the performance of language models in the context of materials science. These tools include standardized dataset splits for each representation, probes for evaluating sensitivity to geometric factors, and tools for seamlessly converting crystal structures into text. Using MatText, we conduct an extensive analysis of the capabilities of language models in modeling materials. Our findings reveal that current language models consistently struggle to capture the geometric information crucial for materials modeling across all representations. Instead, these models tend to leverage local information, which is emphasized in some of our novel representations. Our analysis underscores MatText's ability to reveal shortcomings of text-based methods for materials design.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Nextcloud/Zotero/Alampara et al. - 2024 - MatText Do Language Models Need More than Text & Scale for Materials Modeling.pdf;/Users/wasmer/Zotero/storage/W4GB4USX/2406.html} +} + @article{alberi2019MaterialsDesign2018, title = {The 2019 Materials by Design Roadmap}, author = {Alberi, Kirstin and Nardelli, Marco Buongiorno and Zakutayev, Andriy and Mitas, Lubos and Curtarolo, Stefano and Jain, Anubhav and Fornari, Marco and Marzari, Nicola and Takeuchi, Ichiro and Green, Martin L. and Kanatzidis, Mercouri and Toney, Mike F. and Butenko, Sergiy and Meredig, Bryce and Lany, Stephan and Kattner, Ursula and Davydov, Albert and Toberer, Eric S. and Stevanovic, Vladan and Walsh, Aron and Park, Nam-Gyu and Aspuru-Guzik, Alán and Tabor, Daniel P. and Nelson, Jenny and Murphy, James and Setlur, Anant and Gregoire, John and Li, Hong and Xiao, Ruijuan and Ludwig, Alfred and Martin, Lane W. and Rappe, Andrew M. 
and Wei, Su-Huai and Perkins, John}, @@ -928,6 +944,21 @@ Subject\_term\_id: cheminformatics;computational-models;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Barrett et al_2022_Autoregressive neural-network wavefunctions for ab initio quantum chemistry.pdf} } +@online{barroso-luqueOpenMaterials20242024, + title = {Open {{Materials}} 2024 ({{OMat24}}) {{Inorganic Materials Dataset}} and {{Models}}}, + author = {Barroso-Luque, Luis and Shuaibi, Muhammed and Fu, Xiang and Wood, Brandon M. and Dzamba, Misko and Gao, Meng and Rizvi, Ammar and Zitnick, C. Lawrence and Ulissi, Zachary W.}, + date = {2024-10-16}, + eprint = {2410.12771}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.12771}, + url = {http://arxiv.org/abs/2410.12771}, + urldate = {2024-10-21}, + abstract = {The ability to discover new materials with desirable properties is critical for numerous applications from helping mitigate climate change to advances in next generation computing hardware. AI has the potential to accelerate materials discovery and design by more effectively exploring the chemical space compared to other computational methods or by trial-and-error. While substantial progress has been made on AI for materials data, benchmarks, and models, a barrier that has emerged is the lack of publicly available training data and open pre-trained models. To address this, we present a Meta FAIR release of the Open Materials 2024 (OMat24) large-scale open dataset and an accompanying set of pre-trained models. OMat24 contains over 110 million density functional theory (DFT) calculations focused on structural and compositional diversity. Our EquiformerV2 models achieve state-of-the-art performance on the Matbench Discovery leaderboard and are capable of predicting ground-state stability and formation energies to an F1 score above 0.9 and an accuracy of 20 meV/atom, respectively. 
We explore the impact of model size, auxiliary denoising objectives, and fine-tuning on performance across a range of datasets including OMat24, MPtraj, and Alexandria. The open release of the OMat24 dataset and models enables the research community to build upon our efforts and drive further advancements in AI-assisted materials science.}, + pubstate = {prepublished}, + keywords = {AML,dataset,large dataset,MatBench,materials database,Meta Research,ML,OMat24,Open Catalyst,original publication,universal potential,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Barroso-Luque et al. - 2024 - Open Materials 2024 (OMat24) Inorganic Materials Dataset and Models.pdf;/Users/wasmer/Zotero/storage/I84TAV6W/2410.html} +} + @article{barthLocalExchangecorrelationPotential1972, title = {A Local Exchange-Correlation Potential for the Spin Polarized Case. i}, author = {family=Barth, given=U., prefix=von, useprefix=false and Hedin, L.}, @@ -1498,7 +1529,7 @@ Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-cri urldate = {2024-06-16}, abstract = {As machine learning models are becoming mainstream tools for molecular and materials research, there is an urgent need to improve the nature, quality, and accessibility of atomistic data. 
In turn, there are opportunities for a new generation of generally applicable datasets and distillable models.}, langid = {english}, - keywords = {/unread,AI4Science,AML,commentary,data augmentation,database generation,foundation models,ML,ML-FPO,op-ed,roadmap,synthetic data}, + keywords = {AI4Science,AML,commentary,data augmentation,database generation,foundation models,ML,ML-FPO,op-ed,roadmap,synthetic data}, file = {/Users/wasmer/Nextcloud/Zotero/Ben Mahmoud et al_2024_Data as the next challenge in atomistic machine learning.pdf} } @@ -1590,6 +1621,21 @@ Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-cri file = {/Users/wasmer/Nextcloud/Zotero/Bernevig et al_2022_Progress and prospects in magnetic topological materials.pdf} } +@book{betheQuantumMechanicsOne1977, + title = {Quantum {{Mechanics}} of {{One-}} and {{Two-Electron Atoms}}}, + author = {Bethe, Hans A. and Salpeter, Edwin E.}, + date = {1977}, + publisher = {Springer US}, + location = {Boston, MA}, + doi = {10.1007/978-1-4613-4104-8}, + url = {http://link.springer.com/10.1007/978-1-4613-4104-8}, + urldate = {2024-12-15}, + isbn = {978-0-306-20022-9 978-1-4613-4104-8}, + langid = {english}, + keywords = {graduate,Many-body theory,physics,Quantum Physics,rec-by-katsumoto,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Bethe and Salpeter - 1977 - Quantum Mechanics of One- and Two-Electron Atoms.pdf} +} + @article{bhardwajTopologicalMaterials2020, title = {Topological {{Materials}}}, author = {Bhardwaj, Vishal and Chatterjee, Ratnamala}, @@ -1912,7 +1958,7 @@ Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-cri abstract = {Contents 1. Introduction- Methods and software for electronic structure based simulations of chemistry and materials 2. Density Functional Theory: Formalism and Current Directions 3. Density functional methods - implementation, challenges, successes 4. Green’s function based many-body perturbation theory 5. 
Wave-function theory approaches – explicit approaches to electron correlation 6. Quantum Monte Carlo and stochastic electronic structure methods 7. Heavy element relativity, spin-orbit physics, and magnetism 8. Semiempirical methods 9. Simulating Nuclear Dynamics with Quantum Effects 10. Real-Time Propagation in Electronic Structure Theory 11. Spectroscopy 12. Tools for exploring potential energy surfaces 13. Managing complex computational workflows 14. Current and Future Computer Architectures 15. Electronic structure software engineering 16. Education and Training in Electronic Structure Theory: Navigating an Evolving Landscape 17. Electronic structure theory facing industry and realistic modeling of experiments 18. List of Acronyms}, langid = {english}, keywords = {DFT,FZJ,PGI,PGI-1/IAS-1,review-of-DFT,roadmap}, - file = {/Users/wasmer/Nextcloud/Zotero/Blum et al_2024_Roadmap on methods and software for electronic structure based simulations in2.pdf} + file = {/Users/wasmer/Nextcloud/Zotero/Blum et al_2024_Roadmap on methods and software for electronic structure based simulations in.pdf} } @book{blundellMagnetismCondensedMatter2001, @@ -1961,7 +2007,7 @@ Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-cri urldate = {2024-05-23}, abstract = {The Atomic Cluster Expansion provides local, complete basis functions that enable efficient parametrization of many-atom interactions. We extend the Atomic Cluster Expansion to incorporate graph basis functions. This naturally leads to representations that enable the efficient description of semilocal interactions in physically and chemically transparent form. Simplification of the graph expansion by tensor decomposition results in an iterative procedure that comprises current message-passing machine learning interatomic potentials. We demonstrate the accuracy and efficiency of the graph Atomic Cluster Expansion for a number of small molecules, clusters and a general-purpose model for carbon. 
We further show that the graph Atomic Cluster Expansion scales linearly with number of neighbors and layer depth of the graph basis functions.}, pubstate = {prepublished}, - keywords = {ACE,AML,carbon,clusters,descriptors,equivariant,GNN,graph ACE,ML,MLP,MPNN,semilocal interactions,smal organic molecules,symmetry}, + keywords = {ACE,AML,carbon,clusters,descriptors,equivariant,GNN,GRACE,ML,MLP,MPNN,semilocal interactions,small organic molecules,symmetry}, file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2024_Graph Atomic Cluster Expansion for semilocal interactions beyond equivariant.pdf;/Users/wasmer/Zotero/storage/VRLXR3D3/2311.html} } @@ -1983,6 +2029,24 @@ Subject\_term\_id: electronic-properties-and-materials;phase-transitions-and-cri file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al_2024_Graph Atomic Cluster Expansion for Semilocal Interactions beyond Equivariant2.pdf;/Users/wasmer/Zotero/storage/XIUFRJDM/PhysRevX.14.html} } +@article{bochkarevGraphAtomicCluster2024b, + title = {Graph {{Atomic Cluster Expansion}} for {{Semilocal Interactions}} beyond {{Equivariant Message Passing}}}, + author = {Bochkarev, Anton and Lysogorskiy, Yury and Drautz, Ralf}, + date = {2024-06-03}, + journaltitle = {Physical Review X}, + shortjournal = {Phys. Rev. X}, + volume = {14}, + number = {2}, + pages = {021036}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevX.14.021036}, + url = {https://link.aps.org/doi/10.1103/PhysRevX.14.021036}, + urldate = {2024-10-11}, + abstract = {The atomic cluster expansion provides local, complete basis functions that enable efficient parametrization of many-atom interactions. We extend the atomic cluster expansion to incorporate graph basis functions. This naturally leads to representations that enable the efficient description of semilocal interactions in physically and chemically transparent form. 
Simplification of the graph expansion by tensor decomposition results in an iterative procedure that comprises current message-passing machine learning interatomic potentials. We demonstrate the accuracy and efficiency of the graph atomic cluster expansion for a number of small molecules, clusters, and a general-purpose model for carbon. We further show that the graph atomic cluster expansion scales linearly with the number of neighbors and layer depth of the graph basis functions.}, + keywords = {ACE,AML,carbon,chemical species scaling problem,clusters,descriptors,equivariant,GRACE,library,ML,MLP,MPNN,semilocal interactions,tensor decomposition,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Bochkarev et al. - 2024 - Graph Atomic Cluster Expansion for Semilocal Interactions beyond Equivariant Message Passing.pdf;/Users/wasmer/Zotero/storage/DBFDSQN8/PhysRevX.14.html} +} + @online{bochkarevMultilayerAtomicCluster2022a, title = {Multilayer Atomic Cluster Expansion for Semi-Local Interactions}, author = {Bochkarev, Anton and Lysogorskiy, Yury and Ortner, Christoph and Csányi, Gábor and Drautz, Ralf}, @@ -2127,6 +2191,21 @@ Subject\_term\_id: computational-chemistry;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Bondesan_Welling_2021_The Hintons in your Neural Network.pdf;/Users/wasmer/Zotero/storage/E2RNIICV/2103.html} } +@online{bonnevilleAcceleratingPhaseField2024, + title = {Accelerating {{Phase Field Simulations Through}} a {{Hybrid Adaptive Fourier Neural Operator}} with {{U-Net Backbone}}}, + author = {Bonneville, Christophe and Bieberdorf, Nathan and Hegde, Arun and Asta, Mark and Najm, Habib N. and Capolungo, Laurent and Safta, Cosmin}, + date = {2024-07-08}, + eprint = {2406.17119}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.17119}, + url = {http://arxiv.org/abs/2406.17119}, + urldate = {2024-10-21}, + abstract = {Prolonged contact between a corrosive liquid and metal alloys can cause progressive dealloying. 
For such liquid-metal dealloying (LMD) process, phase field models have been developed. However, the governing equations often involve coupled non-linear partial differential equations (PDE), which are challenging to solve numerically. In particular, stiffness in the PDEs requires an extremely small time steps (e.g. \$10\textasciicircum\{-12\}\$ or smaller). This computational bottleneck is especially problematic when running LMD simulation until a late time horizon is required. This motivates the development of surrogate models capable of leaping forward in time, by skipping several consecutive time steps at-once. In this paper, we propose U-Shaped Adaptive Fourier Neural Operators (U-AFNO), a machine learning (ML) model inspired by recent advances in neural operator learning. U-AFNO employs U-Nets for extracting and reconstructing local features within the physical fields, and passes the latent space through a vision transformer (ViT) implemented in the Fourier space (AFNO). We use U-AFNOs to learn the dynamics mapping the field at a current time step into a later time step. We also identify global quantities of interest (QoI) describing the corrosion process (e.g. the deformation of the liquid-metal interface) and show that our proposed U-AFNO model is able to accurately predict the field dynamics, in-spite of the chaotic nature of LMD. Our model reproduces the key micro-structure statistics and QoIs with a level of accuracy on-par with the high-fidelity numerical solver. We also investigate the opportunity of using hybrid simulations, in which we alternate forward leap in time using the U-AFNO with high-fidelity time stepping. 
We demonstrate that while advantageous for some surrogate model design choices, our proposed U-AFNO model in fully auto-regressive settings consistently outperforms hybrid schemes.}, + pubstate = {prepublished}, + keywords = {/unread,accelerated discovery,AML,continuum physics,ML,neural operator,phase field method,surrogate model}, + file = {/Users/wasmer/Nextcloud/Zotero/Bonneville et al. - 2024 - Accelerating Phase Field Simulations Through a Hybrid Adaptive Fourier Neural Operator with U-Net Ba.pdf;/Users/wasmer/Zotero/storage/C887MECB/2406.html} +} + @article{borchaniSurveyMultioutputRegression2015, title = {A Survey on Multi-Output Regression}, author = {Borchani, Hanen and Varando, Gherardo and Bielza, Concha and Larrañaga, Pedro}, @@ -2348,6 +2427,21 @@ Subject\_term\_id: computational-chemistry;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Braun_Ebert_2021_The Impact of Spin–Orbit Interaction on the Image States of High-Z Materials.pdf} } +@online{brehmerDoesEquivarianceMatter2024, + title = {Does Equivariance Matter at Scale?}, + author = {Brehmer, Johann and Behrends, Sönke and family=Haan, given=Pim, prefix=de, useprefix=false and Cohen, Taco}, + date = {2024-10-30}, + eprint = {2410.23179}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.23179}, + url = {http://arxiv.org/abs/2410.23179}, + urldate = {2024-11-08}, + abstract = {Given large data sets and sufficient compute, is it beneficial to design neural architectures for the structure and symmetries of each problem? Or is it more efficient to learn them from data? We study empirically how equivariant and non-equivariant networks scale with compute and training samples. Focusing on a benchmark problem of rigid-body interactions and on general-purpose transformer architectures, we perform a series of experiments, varying the model size, training steps, and dataset size. We find evidence for three conclusions. 
First, equivariance improves data efficiency, but training non-equivariant models with data augmentation can close this gap given sufficient epochs. Second, scaling with compute follows a power law, with equivariant models outperforming non-equivariant ones at each tested compute budget. Finally, the optimal allocation of a compute budget onto model size and training duration differs between equivariant and non-equivariant models.}, + pubstate = {prepublished}, + keywords = {/unread,equivariant,General ML,large models,ML,scaling,symmetry}, + file = {/Users/wasmer/Nextcloud/Zotero/Brehmer et al. - 2024 - Does equivariance matter at scale.pdf;/Users/wasmer/Zotero/storage/GPBHA68S/2410.html} +} + @online{brehmerReducedBasisSurrogates2023, title = {Reduced Basis Surrogates for Quantum Spin Systems Based on Tensor Networks}, author = {Brehmer, Paul and Herbst, Michael F. and Wessel, Stefan and Rizzi, Matteo and Stamm, Benjamin}, @@ -2515,7 +2609,7 @@ Subject\_term\_id: computational-chemistry;computational-science}, journaltitle = {Foundations and Trends® in Machine Learning}, shortjournal = {FNT in Machine Learning}, volume = {8}, - number = {3-4}, + number = {3--4}, pages = {231--357}, issn = {1935-8237, 1935-8245}, doi = {10.1561/2200000050}, @@ -2601,6 +2695,25 @@ Subject\_term\_id: computational-chemistry;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Butler et al_2018_Machine learning for molecular and materials science.pdf} } +@article{butlerSettingStandardsData2024, + title = {Setting Standards for Data Driven Materials Science}, + author = {Butler, Keith T. and Choudhary, Kamal and Csanyi, Gabor and Ganose, Alex M. and Kalinin, Sergei V. 
and Morgan, Dane}, + date = {2024-10-01}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {10}, + number = {1}, + pages = {1--3}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-024-01411-6}, + url = {https://www.nature.com/articles/s41524-024-01411-6}, + urldate = {2025-01-08}, + langid = {english}, + keywords = {AML,best practices,checklist,community standards,data-driven,materials,materials database,ML,standardization,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Butler et al. - 2024 - Setting standards for data driven materials science.pdf} +} + @article{bystromCIDERExpressiveNonlocal2022, title = {{{CIDER}}: {{An Expressive}}, {{Nonlocal Feature Set}} for {{Machine Learning Density Functionals}} with {{Exact Constraints}}}, shorttitle = {{{CIDER}}}, @@ -2791,6 +2904,23 @@ Subject\_term\_id: computational-chemistry;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Cancès et al_2023_Numerical stability and efficiency of response property calculations in density.pdf;/Users/wasmer/Zotero/storage/XSX6PBPG/2210.html} } +@online{cangiMaterialsLearningAlgorithms2024, + title = {Materials {{Learning Algorithms}} ({{MALA}}): {{Scalable Machine Learning}} for {{Electronic Structure Calculations}} in {{Large-Scale Atomistic Simulations}}}, + shorttitle = {Materials {{Learning Algorithms}} ({{MALA}})}, + author = {Cangi, Attila and Fiedler, Lenz and Brzoza, Bartosz and Shah, Karan and Callow, Timothy J. and Kotik, Daniel and Schmerler, Steve and Barry, Matthew C. and Goff, James M. and Rohskopf, Andrew and Vogel, Dayton J. and Modine, Normand and Thompson, Aidan P. 
and Rajamanickam, Sivasankaran}, + date = {2024-11-29}, + eprint = {2411.19617}, + eprinttype = {arXiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2411.19617}, + url = {http://arxiv.org/abs/2411.19617}, + urldate = {2025-01-08}, + abstract = {We present the Materials Learning Algorithms (MALA) package, a scalable machine learning framework designed to accelerate density functional theory (DFT) calculations suitable for large-scale atomistic simulations. Using local descriptors of the atomic environment, MALA models efficiently predict key electronic observables, including local density of states, electronic density, density of states, and total energy. The package integrates data sampling, model training and scalable inference into a unified library, while ensuring compatibility with standard DFT and molecular dynamics codes. We demonstrate MALA's capabilities with examples including boron clusters, aluminum across its solid-liquid phase boundary, and predicting the electronic structure of a stacking fault in a large beryllium slab. Scaling analyses reveal MALA's computational efficiency and identify bottlenecks for future optimization. With its ability to model electronic structures at scales far beyond standard DFT, MALA is well suited for modeling complex material systems, making it a versatile tool for advanced materials research.}, + pubstate = {prepublished}, + keywords = {ACE,AML,bispectrum,grid-based descriptors,hyperparameters optimization,linear-scaling DFT,MALA,ML,ML-DFT,ML-ESM,parallelization,prediction of DOS,prediction of electron density,prediction of LDOS,prediction of total energy,STM,tutorial,twisted bilayer}, + file = {/Users/wasmer/Nextcloud/Zotero/Cangi et al. 
- 2024 - Materials Learning Algorithms (MALA) Scalable Machine Learning for Electronic Structure Calculation.pdf;/Users/wasmer/Zotero/storage/ZWKA5RZA/2411.html} +} + @article{cangiPotentialFunctionalsDensity2013, title = {Potential Functionals versus Density Functionals}, author = {Cangi, Attila and Gross, E. K. U. and Burke, Kieron}, @@ -2883,6 +3013,21 @@ Subject\_term\_id: computational-chemistry;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Capelle_2006_A bird's-eye view of density-functional theory.pdf;/Users/wasmer/Zotero/storage/8TLEU4M3/0211443.html} } +@online{carareRandomSpinCommittee2024, + title = {Random {{Spin Committee Approach For Smooth Interatomic Potentials}}}, + author = {Cărare, Vlad and Deringer, Volker L. and Csányi, Gábor}, + date = {2024-10-24}, + eprint = {2410.16252}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.16252}, + url = {http://arxiv.org/abs/2410.16252}, + urldate = {2024-11-20}, + abstract = {Training interatomic potentials for spin-polarized systems continues to be a difficult task for the molecular modeling community. In this note, a proof-of-concept, random initial spin committee approach is proposed for obtaining the ground state of spin-polarized systems with a controllable degree of accuracy. The approach is tested on two toy models of elemental sulfur where the exact optimal spin configuration can be known. Machine-learning potentials are trained on the resulting data, and increasingly accurate fits with respect to the ground state are achieved, marking a step towards machine-learning force fields for general bulk spin-polarized systems.}, + pubstate = {prepublished}, + keywords = {AML,database generation,magnetic ML-IAP,magnetism,ML,MLP,spin-dependent,spin-polarized}, + file = {/Users/wasmer/Nextcloud/Zotero/Cărare et al. 
- 2024 - Random Spin Committee Approach For Smooth Interatomic Potentials.pdf;/Users/wasmer/Zotero/storage/NXYWHYIW/2410.html} +} + @online{carboneInvestigationMagneticProperties2023, title = {Investigation of magnetic properties of 4f-adatoms on graphene}, author = {Carbone, Johanna P. and Bouaziz, Juba and Bihlmayer, Gustav and Blügel, Stefan}, @@ -3330,6 +3475,26 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa file = {/Users/wasmer/Nextcloud/Zotero/Chard et al_2018_DLHub.pdf;/Users/wasmer/Zotero/storage/VT5H6PP6/1811.html} } +@article{chengCartesianAtomicCluster2024, + title = {Cartesian atomic cluster expansion for machine learning interatomic potentials}, + author = {Cheng, Bingqing}, + date = {2024-07-18}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {10}, + number = {1}, + pages = {1--10}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-024-01332-4}, + url = {https://www.nature.com/articles/s41524-024-01332-4}, + urldate = {2024-12-13}, + abstract = {Machine learning interatomic potentials are revolutionizing large-scale, accurate atomistic modeling in material science and chemistry. Many potentials use atomic cluster expansion or equivariant message-passing frameworks. Such frameworks typically use spherical harmonics as angular basis functions, followed by Clebsch-Gordan contraction to maintain rotational symmetry. We propose a mathematically equivalent and simple alternative that performs all operations in the Cartesian coordinates. This approach provides a complete set of polynomially independent features of atomic environments while maintaining interaction body orders. Additionally, we integrate low-dimensional embeddings of various chemical elements, trainable radial channel coupling, and inter-atomic message passing. 
The resulting potential, named Cartesian Atomic Cluster Expansion (CACE), exhibits good accuracy, stability, and generalizability. We validate its performance in diverse systems, including bulk water, small molecules, and 25-element high-entropy alloys.}, + langid = {english}, + keywords = {ACE,ACE-related,alternative approaches,alternative to spherical harmonics,AML,body-order,Cartesian coordinates,chemical species scaling problem,HEA,library,MD17,ML,MLP,MPNN,spherical harmonics,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Cheng - 2024 - Cartesian atomic cluster expansion for machine learning interatomic potentials.pdf} +} + @online{chengEquivariantNeuralOperator2023, title = {Equivariant {{Neural Operator Learning}} with {{Graphon Convolution}}}, author = {Cheng, Chaoran and Peng, Jian}, @@ -3530,7 +3695,8 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa url = {https://doi.org/10.1021/acs.jpclett.4c01126}, urldate = {2024-09-05}, abstract = {Large language models (LLMs) such as generative pretrained transformers (GPTs) have shown potential for various commercial applications, but their applicability for materials design remains underexplored. In this Letter, AtomGPT is introduced as a model specifically developed for materials design based on transformer architectures, demonstrating capabilities for both atomistic property prediction and structure generation. This study shows that a combination of chemical and structural text descriptions can efficiently predict material properties with accuracy comparable to graph neural network models, including formation energies, electronic bandgaps from two different methods, and superconducting transition temperatures. Furthermore, AtomGPT can generate atomic structures for tasks such as designing new superconductors, with the predictions validated through density functional theory calculations. 
This work paves the way for leveraging LLMs in forward and inverse materials design, offering an efficient approach to the discovery and optimization of materials.}, - keywords = {/unread,ALIGNN,AML,generative models,GPT,inverse design,JARVIS,language models,library,ML,property prediction,structure prediction,transformer,with-code} + keywords = {ALIGNN,AML,generative models,GPT,inverse design,JARVIS,language models,library,ML,property prediction,structure prediction,transformer,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Choudhary - 2024 - AtomGPT Atomistic Generative Pretrained Transformer for Forward and Inverse Materials Design.pdf} } @article{choudharyAtomisticLineGraph2021, @@ -3550,10 +3716,30 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa abstract = {Graph neural networks (GNN) have been shown to provide substantial performance improvements for atomistic material representation and modeling compared with descriptor-based machine learning models. While most existing GNN models for atomistic predictions are based on atomic distance information, they do not explicitly incorporate bond angles, which are critical for distinguishing many atomic structures. Furthermore, many material properties are known to be sensitive to slight changes in bond angles. We present an Atomistic Line Graph Neural Network (ALIGNN), a GNN architecture that performs message passing on both the interatomic bond graph and its line graph corresponding to bond angles. We demonstrate that angle information can be explicitly and efficiently included, leading to improved performance on multiple atomistic prediction tasks. We ALIGNN models for predicting 52 solid-state and molecular properties available in the JARVIS-DFT, Materials project, and QM9 databases. 
ALIGNN can outperform some previously reported GNN models on atomistic prediction tasks by up to 85\% in accuracy with better or comparable model training speed.}, issue = {1}, langid = {english}, - keywords = {ALIGNN,GNN,ML,MPNN,original publication}, + keywords = {ALIGNN,AML,GNN,JARVIS,JARVIS-DFT,ML,MPNN,original publication}, file = {/Users/wasmer/Nextcloud/Zotero/Choudhary_DeCost_2021_Atomistic Line Graph Neural Network for improved materials property predictions.pdf;/Users/wasmer/Zotero/storage/F8XSYTPV/s41524-021-00650-1.html} } +@article{choudharyChemNLPNaturalLanguageProcessingBased2023, + title = {{{ChemNLP}}: {{A Natural Language-Processing-Based Library}} for {{Materials Chemistry Text Data}}}, + shorttitle = {{{ChemNLP}}}, + author = {Choudhary, Kamal and Kelley, Mathew L.}, + date = {2023-09-07}, + journaltitle = {The Journal of Physical Chemistry C}, + shortjournal = {J. Phys. Chem. C}, + volume = {127}, + number = {35}, + pages = {17545--17555}, + publisher = {American Chemical Society}, + issn = {1932-7447}, + doi = {10.1021/acs.jpcc.3c03106}, + url = {https://doi.org/10.1021/acs.jpcc.3c03106}, + urldate = {2025-01-08}, + abstract = {In this work, we present the ChemNLP library that can be used for (1) curating open access datasets for materials and chemistry literature, developing and comparing traditional machine learning, transformers and graph neural network models for (2) classifying and clustering texts, (3) named entity recognition for large-scale text-mining, (4) abstractive summarization for generating titles of articles from abstracts, (5) text generation for suggesting abstracts from titles, (6) integration with density functional theory dataset for identifying potential candidate materials such as superconductors, and (7) web-interface development for text and reference query. We primarily use the publicly available arXiv and PubChem datasets, but the tools can be used for other datasets as well. 
Moreover, as new models are developed, they can be easily integrated in the library. ChemNLP is available at the websites: https://github.com/usnistgov/chemnlp and https://jarvis.nist.gov/jarvischemnlp/.}, + keywords = {AML,ChemNLP,clustering,language models,library,ML,natural language generation,NLP,original publication,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Choudhary and Kelley - 2023 - ChemNLP A Natural Language-Processing-Based Library for Materials Chemistry Text Data_1.pdf} +} + @article{choudharyJARVISLeaderboardLargeScale2024, title = {{{JARVIS-Leaderboard}}: A Large Scale Benchmark of Materials Design Methods}, shorttitle = {{{JARVIS-Leaderboard}}}, @@ -3575,6 +3761,26 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2024_JARVIS-Leaderboard.pdf} } +@article{choudharyJointAutomatedRepository2020, + title = {The Joint Automated Repository for Various Integrated Simulations ({{JARVIS}}) for Data-Driven Materials Design}, + author = {Choudhary, Kamal and Garrity, Kevin F. and Reid, Andrew C. E. and DeCost, Brian and Biacchi, Adam J. and Hight Walker, Angela R. and Trautt, Zachary and Hattrick-Simpers, Jason and Kusne, A. Gilad and Centrone, Andrea and Davydov, Albert and Jiang, Jie and Pachter, Ruth and Cheon, Gowoon and Reed, Evan and Agrawal, Ankit and Qian, Xiaofeng and Sharma, Vinit and Zhuang, Houlong and Kalinin, Sergei V. and Sumpter, Bobby G. 
and Pilania, Ghanshyam and Acar, Pinar and Mandal, Subhasish and Haule, Kristjan and Vanderbilt, David and Rabe, Karin and Tavazza, Francesca}, + date = {2020-11-12}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {6}, + number = {1}, + pages = {1--13}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-020-00440-1}, + url = {https://www.nature.com/articles/s41524-020-00440-1}, + urldate = {2025-01-08}, + abstract = {The Joint Automated Repository for Various Integrated Simulations (JARVIS) is an integrated infrastructure to accelerate materials discovery and design using density functional theory (DFT), classical force-fields (FF), and machine learning (ML) techniques. JARVIS is motivated by the Materials Genome Initiative (MGI) principles of developing open-access databases and tools to reduce the cost and development time of materials discovery, optimization, and deployment. The major features of JARVIS are: JARVIS-DFT, JARVIS-FF, JARVIS-ML, and JARVIS-tools. To date, JARVIS consists of ≈40,000 materials and ≈1 million calculated properties in JARVIS-DFT, ≈500 materials and ≈110 force-fields in JARVIS-FF, and ≈25 ML models for material-property predictions in JARVIS-ML, all of which are continuously expanding. JARVIS-tools provides scripts and workflows for running and analyzing various simulations. We compare our computational data to experiments or high-fidelity computational methods wherever applicable to evaluate error/uncertainty in predictions. In addition to the existing workflows, the infrastructure can support a wide variety of other technologically important applications as part of the data-driven materials design paradigm. 
The JARVIS datasets and tools are publicly available at the website: https://jarvis.nist.gov.}, + langid = {english}, + keywords = {/unread,computational platform,JARVIS,JARVIS-DFT,materials,materials database,Materials genome initiative,original publication,with-data,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al. - 2020 - The joint automated repository for various integrated simulations (JARVIS) for data-driven materials.pdf} +} + @online{choudharyLargeScaleBenchmark2023, title = {Large {{Scale Benchmark}} of {{Materials Design Methods}}}, author = {Choudhary, Kamal and Wines, Daniel and Li, Kangming and Garrity, Kevin F. and Gupta, Vishu and Romero, Aldo H. and Krogel, Jaron T. and Saritas, Kayahan and Fuhr, Addis and Ganesh, Panchapakesan and Kent, Paul R. C. and Yan, Keqiang and Lin, Yuchao and Ji, Shuiwang and Blaiszik, Ben and Reiser, Patrick and Friederich, Pascal and Agrawal, Ankit and Tiwary, Pratyush and Beyerle, Eric and Minch, Peter and Rhone, Trevor David and Takeuchi, Ichiro and Wexler, Robert B. and Mannodi-Kanakkithodi, Arun and Ertekin, Elif and Mishra, Avanish and Mathew, Nithin and Baird, Sterling G. and Wood, Mitchell and Rohskopf, Andrew Dale and Hattrick-Simpers, Jason and Wang, Shih-Han and Achenie, Luke E. K. and Xin, Hongliang and Williams, Maureen and Biacchi, Adam J. 
and Tavazza, Francesca}, @@ -3607,6 +3813,42 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2018_Machine learning with force-field-inspired descriptors for materials Author's Manuscript.pdf;/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2018_Machine learning with force-field-inspired descriptors for materials Suppl JARVIS-ML.pdf;/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2018_Machine learning with force-field-inspired descriptors for materials.pdf;/Users/wasmer/Zotero/storage/88LWP9IL/Choudhary et al_2018_Machine learning with force-field-inspired descriptors for materials Suppl Feature Importance.xlsx;/Users/wasmer/Zotero/storage/8U5VA8X6/Choudhary et al_2018_Machine learning with force-field-inspired descriptors for materials Suppl Feature Importance.xlsx;/Users/wasmer/Zotero/storage/NYHDKNR3/PhysRevMaterials.2.html} } +@article{choudharyRecentAdvancesApplications2022, + title = {Recent Advances and Applications of Deep Learning Methods in Materials Science}, + author = {Choudhary, Kamal and DeCost, Brian and Chen, Chi and Jain, Anubhav and Tavazza, Francesca and Cohn, Ryan and Park, Cheol Woo and Choudhary, Alok and Agrawal, Ankit and Billinge, Simon J. L. and Holm, Elizabeth and Ong, Shyue Ping and Wolverton, Chris}, + date = {2022-04-05}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {8}, + number = {1}, + pages = {1--26}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-022-00734-6}, + url = {https://www.nature.com/articles/s41524-022-00734-6}, + urldate = {2025-01-08}, + abstract = {Deep learning (DL) is one of the fastest-growing topics in materials data science, with rapidly emerging applications spanning atomistic, image-based, spectral, and textual data modalities. DL allows analysis of unstructured data and automated identification of features. 
The recent development of large materials databases has fueled the application of DL methods in atomistic prediction in particular. In contrast, advances in image and spectral data have largely leveraged synthetic data enabled by high-quality forward models as well as by generative unsupervised DL methods. In this article, we present a high-level overview of deep learning methods followed by a detailed discussion of recent developments of deep learning in atomistic simulation, materials imaging, spectral analysis, and natural language processing. For each modality we discuss applications involving both theoretical and experimental data, typical modeling approaches with their strengths and limitations, and relevant publicly available software and datasets. We conclude the review with a discussion of recent cross-cutting work related to uncertainty quantification in this field and a brief perspective on limitations, challenges, and potential growth areas for DL methods in materials science.}, + langid = {english}, + keywords = {/unread,AML,CNN,Computational methods,generative models,GNN,list of software,lists,ML,NLP,reinforcement-learning,representation learning,review,review-of-AML,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al. 
- 2022 - Recent advances and applications of deep learning methods in materials science.pdf} +} + +@online{choudharySLMatComprehensiveServerless2024, + title = {{{SLMat}}: {{A Comprehensive Serverless Toolkit}} for {{Advanced Materials Design}}}, + shorttitle = {{{SLMat}}}, + author = {Choudhary, Kamal}, + date = {2024-09-03}, + eprinttype = {ChemRxiv}, + doi = {10.26434/chemrxiv-2024-fqq27}, + url = {https://chemrxiv.org/engage/chemrxiv/article-details/66d379afa4e53c487644ff98}, + urldate = {2025-01-07}, + abstract = {SLMat is a serverless, browser-based toolkit that revolutionizes computational materials science by offering a scalable and efficient alternative to traditional server-based platforms like Google Colab. By eliminating the need for server management and providing persistent setups, SLMat enhances productivity and security, enabling researchers to focus on innovation rather than infrastructure. The toolkit integrates seamlessly with materials databases, supports AI model development, and offers advanced features like AI-guided coding and chatbot integration. With its streamlined workflow, SLMat accelerates production time, promotes reproducibility, and democratizes access to powerful computational resources. This makes SLMat an essential tool for modern materials science, offering a versatile and cost-effective solution for researchers across various domains. 
Code: https://github.com/deepmaterials/slmat WebPage: https://deepmaterials.github.io/slmat/}, + langid = {english}, + pubstate = {prepublished}, + keywords = {AI tools,alternative approaches,alternative to Jupyter,AML,atomistic simulation,developer tools,Jupyter,library,ML,notebooks,Pyodide,reproducibility,RSE,serverless computing,with-code,workflow}, + file = {/Users/wasmer/Nextcloud/Zotero/Choudhary - 2024 - SLMat A Comprehensive Serverless Toolkit for Advanced Materials Design.pdf} +} + @unpublished{chouhanImprovingScalabilityReliability2021, title = {Improving Scalability and Reliability of {{MPI-agnostic}} Transparent Checkpointing for Production Workloads at {{NERSC}}}, author = {Chouhan, Prashant Singh and Khetawat, Harsh and Resnik, Neil and Jain, Twinkle and Garg, Rohan and Cooperman, Gene and Hartman-Baker, Rebecca and Zhao, Zhengji}, @@ -4445,6 +4687,21 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa file = {/Users/wasmer/Nextcloud/Zotero/Deng et al_2023_Melting of $-mathrm MgSi -mathrm O _ 3 $ determined by machine learning.pdf;/Users/wasmer/Zotero/storage/4DSIHJXI/PhysRevB.107.html} } +@online{dengOvercomingSystematicSoftening2024, + title = {Overcoming Systematic Softening in Universal Machine Learning Interatomic Potentials by Fine-Tuning}, + author = {Deng, Bowen and Choi, Yunyeong and Zhong, Peichen and Riebesell, Janosh and Anand, Shashwat and Li, Zhuohan and Jun, KyuJung and Persson, Kristin A. and Ceder, Gerbrand}, + date = {2024-05-11}, + eprint = {2405.07105}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2405.07105}, + url = {http://arxiv.org/abs/2405.07105}, + urldate = {2024-10-17}, + abstract = {Machine learning interatomic potentials (MLIPs) have introduced a new paradigm for atomic simulations. 
Recent advancements have seen the emergence of universal MLIPs (uMLIPs) that are pre-trained on diverse materials datasets, providing opportunities for both ready-to-use universal force fields and robust foundations for downstream machine learning refinements. However, their performance in extrapolating to out-of-distribution complex atomic environments remains unclear. In this study, we highlight a consistent potential energy surface (PES) softening effect in three uMLIPs: M3GNet, CHGNet, and MACE-MP-0, which is characterized by energy and force under-prediction in a series of atomic-modeling benchmarks including surfaces, defects, solid-solution energetics, phonon vibration modes, ion migration barriers, and general high-energy states. We find that the PES softening behavior originates from a systematic underprediction error of the PES curvature, which derives from the biased sampling of near-equilibrium atomic arrangements in uMLIP pre-training datasets. We demonstrate that the PES softening issue can be effectively rectified by fine-tuning with a single additional data point. Our findings suggest that a considerable fraction of uMLIP errors are highly systematic, and can therefore be efficiently corrected. This result rationalizes the data-efficient fine-tuning performance boost commonly observed with foundational MLIPs. We argue for the importance of a comprehensive materials dataset with improved PES sampling for next-generation foundational MLIPs.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Artificial Intelligence,Computer Science - Machine Learning,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Nextcloud/Zotero/Deng et al. 
- 2024 - Overcoming systematic softening in universal machine learning interatomic potentials by fine-tuning.pdf;/Users/wasmer/Zotero/storage/FWECST2B/2405.html} +} + @article{dennerEfficientLearningOnedimensional2020, title = {Efficient Learning of a One-Dimensional Density Functional Theory}, author = {Denner, M. Michael}, @@ -4899,7 +5156,7 @@ Subject\_term\_id: density-functional-theory;electronic-properties-and-materials } @article{drautzSpinclusterExpansionParametrization2004, - title = {Spin-Cluster Expansion: {{Parametrization}} of the General Adiabatic Magnetic Energy Surface with {\emph{Ab Initio}} Accuracy}, + title = {Spin-Cluster Expansion: {{Parametrization}} of the General Adiabatic Magnetic Energy Surface with {\mkbibemph{Ab Initio}} Accuracy}, shorttitle = {Spin-Cluster Expansion}, author = {Drautz, R.}, date = {2004}, @@ -5531,6 +5788,22 @@ Junqi Yin\\ file = {/Users/wasmer/Nextcloud/Zotero/Multiple Scattering Theory.pdf;/Users/wasmer/Zotero/storage/UYLUXULV/978-0-7503-1490-9.html} } +@online{feiAlabOSPythonbasedReconfigurable2024, + title = {{{AlabOS}}: {{A Python-based Reconfigurable Workflow Management Framework}} for {{Autonomous Laboratories}}}, + shorttitle = {{{AlabOS}}}, + author = {Fei, Yuxing and Rendy, Bernardus and Kumar, Rishi and Dartsi, Olympia and Sahasrabuddhe, Hrushikesh P. and McDermott, Matthew J. and Wang, Zheren and Szymanski, Nathan J. and Walters, Lauren N. and Milsted, David and Zeng, Yan and Jain, Anubhav and Ceder, Gerbrand}, + date = {2024-08-30}, + eprint = {2405.13930}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2405.13930}, + url = {http://arxiv.org/abs/2405.13930}, + urldate = {2024-10-17}, + abstract = {The recent advent of autonomous laboratories, coupled with algorithms for high-throughput screening and active learning, promises to accelerate materials discovery and innovation. 
As these autonomous systems grow in complexity, the demand for robust and efficient workflow management software becomes increasingly critical. In this paper, we introduce AlabOS, a general-purpose software framework for orchestrating experiments and managing resources, with an emphasis on automated laboratories for materials synthesis and characterization. AlabOS features a reconfigurable experiment workflow model and a resource reservation mechanism, enabling the simultaneous execution of varied workflows composed of modular tasks while eliminating conflicts between tasks. To showcase its capability, we demonstrate the implementation of AlabOS in a prototype autonomous materials laboratory, A-Lab, with around 3,500 samples synthesized over 1.5 years.}, + pubstate = {prepublished}, + keywords = {autonomous research systems,chemical synthesis,experimental science,experimental workflows,High-throughput,library,materials acceleration platforms,materials screening,materials synthesis,ML,robotics,scientific workflows,self-driving lab,software framework,synt,with-code,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Fei et al. 
- 2024 - AlabOS A Python-based Reconfigurable Workflow Management Framework for Autonomous Laboratories.pdf;/Users/wasmer/Zotero/storage/G3UDHXAE/2405.html} +} + @patent{feinbergSystemsMethodsSpatial2023, type = {patentus}, title = {Systems and {{Methods}} for {{Spatial Graph Convolutions}} with {{Applications}} to {{Drug Discovery}} and {{Molecular Simulation}}}, @@ -5558,6 +5831,21 @@ Junqi Yin\\ file = {/Users/wasmer/Nextcloud/Zotero/Feinberg et al_2023_Systems and methods for spatial graph convolutions with applications to drug2.pdf} } +@online{fengEfficientSamplingMachine2024, + title = {Efficient {{Sampling}} for {{Machine Learning Electron Density}} and {{Its Response}} in {{Real Space}}}, + author = {Feng, Chaoqiang and Zhang, Yaolong and Jiang, Bin}, + date = {2024-10-07}, + eprint = {2410.04977}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.04977}, + url = {http://arxiv.org/abs/2410.04977}, + urldate = {2024-10-17}, + abstract = {Electron density is a fundamental quantity, which can in principle determine all ground state electronic properties of a given system. Although machine learning (ML) models for electron density based on either an atom-centered basis or a real-space grid have been proposed, the demand for the number of high-order basis functions or grid points is enormous. In this work, we propose an efficient grid-point sampling strategy that combines a targeted sampling favoring large density and a screening of grid points associated with linearly independent atomic features. This new sampling strategy is integrated with a field-induced recursively embedded atom neural network model to develop a real-space grid-based ML model for electron density and its response to an electric field. This approach is applied to a QM9 molecular dataset, a H2O/Pt(111) interfacial system, and an Au(100) electrode under an electric field. 
The number of training points is found much smaller than previous models, when yielding comparably accurate predictions for the electron density of the entire grid. The resultant machine learned electron density model enables us to properly partition partial charge onto each atom and analyze the charge variation upon proton transfer in the H2O/Pt(111) system. The machine learned electronic response model allows us to predict charge transfer and the electrostatic potential change induced by an electric field in an Au(100) electrode.},
Over the past decades, density functional theory (DFT) has emerged as the most popular technique for electronic structure simulations, due to its excellent balance between accuracy and computational cost. Yet, pressing societal and technological questions demand solutions for problems of ever-increasing complexity. Even the most efficient DFT implementations are no longer capable of providing answers in an adequate amount of time and with available computational resources. Thus, there is a growing interest in machine learning (ML) based approaches within the electronic structure community, aimed at providing models that replicate the predictive power of DFT at negligible cost. Within this work it will be shown that such ML-DFT approaches, up until now, do not succeed in fully encapsulating the level of electronic structure predictions DFT provides. Based on this assessment, a novel approach to ML-DFT models is presented within this thesis. An exhaustive framework for training ML-DFT models based on a local representation of the electronic structure is developed, including minute treatment of technical issues such as data generation techniques and hyperparameter optimization strategies. Models found via this framework recover the wide array of predictive capabilities of DFT simulations at drastically reduced cost, while retaining DFT levels of accuracy. It is further demonstrated how such models can be used across differently sized atomic systems, phase boundaries and temperature ranges, underlining the general usefulness of this approach.}, + keywords = {AML,CASUS,grid-based descriptors,hyperparameters optimization,HZDR,library,MALA,ML,ML-Density,ML-DFT,ML-FPO,prediction of DOS,prediction of electron density,thesis,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al. 
- 2024 - Development and Application of Scalable Density Functional Theory Machine Learning Models.pdf} +} + @online{fiedlerMachineLearningElectronic2023, title = {Machine Learning the Electronic Structure of Matter across Temperatures}, author = {Fiedler, Lenz and Modine, Normand A. and Miller, Kyle D. and Cangi, Attila}, @@ -5799,6 +6100,46 @@ Junqi Yin\\ file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2022_Training-free hyperparameter optimization of neural networks for electronic2.pdf} } +@article{filotEDPProgramProjecting2023, + title = {{{EDP}}: A Program for Projecting Electron Densities from {{VASP}} onto Planes}, + shorttitle = {{{EDP}}}, + author = {family=Filot, given=I., prefix=a w, useprefix=false}, + date = {2023-07-06}, + journaltitle = {Journal of Open Source Software}, + volume = {8}, + number = {87}, + pages = {5417}, + issn = {2475-9066}, + doi = {10.21105/joss.05417}, + url = {https://joss.theoj.org/papers/10.21105/joss.05417}, + urldate = {2024-11-19}, + abstract = {Filot, I., (2023). EDP: a program for projecting electron densities from VASP onto planes. 
Journal of Open Source Software, 8(87), 5417, https://doi.org/10.21105/joss.05417}, + langid = {english}, + keywords = {/unread,C++,charge density,DFT,library,VASP,visualization,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Filot - 2023 - EDP a program for projecting electron densities from VASP onto planes.pdf} +} + +@book{filotElementsElectronicStructure2024, + title = {Elements of {{Electronic Structure Theory}}}, + author = {Filot, Ivo}, + date = {2024-10-13}, + url = {https://ifilot.pages.tue.nl/elements-of-electronic-structure-theory/index.html}, + urldate = {2024-11-19}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Filot - Elements of Electronic Structure Theory.pdf;/Users/wasmer/Zotero/storage/YGYWIJF2/index.html} +} + +@unpublished{filotPlaneWaveDensity2024, + title = {Plane {{Wave Density Functional Theory}} from {{Scratch}}}, + author = {Filot, Ivo}, + date = {2024-11-12}, + url = {https://github.com/ifilot/pwdft-lecture-notes?tab=readme-ov-file}, + urldate = {2024-11-19}, + howpublished = {Lecture script}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Filot - 2024 - Plane Wave Density Functional Theory from Scratch.pdf;/Users/wasmer/Zotero/storage/ECKRRZVJ/pwdft-lecture-notes.html} +} + @article{finkbeinerGeneratingMinimalTraining2024, title = {Generating {{Minimal Training Sets}} for {{Machine Learned Potentials}}}, author = {Finkbeiner, Jan and Tovey, Samuel and Holm, Christian}, @@ -6212,7 +6553,7 @@ Subject\_term: Quantum physics, Publishing, Peer review}, urldate = {2023-09-18}, abstract = {In 2011-2012 we performed experiments on hybrid superconductor-semiconductor nanowire devices which yielded signatures of Majorana fermions based on zero-bias peaks in tunneling measurements. The research field that grew out of those findings and other contemporary works has advanced significantly, and a lot of new knowledge and insights were gained. However, key smoking gun evidence of Majorana is still lacking. 
In this paper, we report that while reviewing our old data recently, armed with a decade of knowledge, we realized that back in 2012 our results contained two breakthrough Majorana discoveries. Specifically, we have observed quantized zero-bias peaks, the hallmark of ideal Majorana states. Furthermore, we have observed the closing and re-opening of the induced gap perfectly correlated with the emergence of the zero-bias peak - clear evidence of the topological quantum phase superconducting transition. These insights should pave the way to topological Majorana qubits, and you should also check supplementary information for important disclosures.}, pubstate = {prepublished}, - keywords = {/unread,experimental,failure,FZJ,Majorana,MZM,PGI,physics,superconductor,topological,Topological Superconductor}, + keywords = {experimental,failure,FZJ,Majorana,MZM,PGI,physics,superconductor,topological,Topological Superconductor}, file = {/Users/wasmer/Nextcloud/Zotero/Frolov_Mourik_2022_We cannot believe we overlooked these Majorana discoveries.pdf;/Users/wasmer/Zotero/storage/IR3K8NZ9/2203.html} } @@ -6653,6 +6994,21 @@ Subject\_term: Quantum physics, Publishing, Peer review}, file = {/Users/wasmer/Nextcloud/Zotero/Gerard et al_2022_Gold-standard solutions to the Schr-odinger equation using deep learning.pdf;/Users/wasmer/Zotero/storage/DWVRHXZW/2205.html} } +@online{gerardTransferableNeuralWavefunctions2024, + title = {Transferable {{Neural Wavefunctions}} for {{Solids}}}, + author = {Gerard, Leon and Scherbela, Michael and Sutterud, Halvard and Foulkes, Matthew and Grohs, Philipp}, + date = {2024-05-13}, + eprint = {2405.07599}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2405.07599}, + url = {http://arxiv.org/abs/2405.07599}, + urldate = {2024-11-04}, + abstract = {Deep-Learning-based Variational Monte Carlo (DL-VMC) has recently emerged as a highly accurate approach for finding approximate solutions to the many-electron Schr\textbackslash "odinger equation. 
Despite its favorable scaling with the number of electrons, {$\mathcal{O}(n_\text{el}^4)$}, the practical value of DL-VMC is limited by the high cost of optimizing the neural network weights for every system studied. To mitigate this problem, recent research has proposed optimizing a single neural network across multiple systems, reducing the cost per system. Here we extend this approach to solids, where similar but distinct calculations using different geometries, boundary conditions, and supercell sizes are often required. We show how to optimize a single ansatz across all of these variations, reducing the required number of optimization steps by an order of magnitude. Furthermore, we exploit the transfer capabilities of a pre-trained network. We successfully transfer a network, pre-trained on 2x2x2 supercells of LiH, to 3x3x3 supercells. This reduces the number of optimization steps required to simulate the large system by a factor of 50 compared to previous work.},
and Bihlmayer, Gustav and Betzinger, Markus and Michalicek, Gregor and Wortmann, Daniel and Blügel, Stefan}, @@ -6880,6 +7236,21 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Gilligan et al_2023_A rule-free workflow for the automated generation of databases from scientific.pdf;/Users/wasmer/Zotero/storage/W8WDMBDK/2301.html} } +@online{gilliganSamplingLatentMaterialProperty2024, + title = {Sampling {{Latent Material-Property Information From LLM-Derived Embedding Representations}}}, + author = {Gilligan, Luke P. J. and Cobelli, Matteo and Sayeed, Hasan M. and Sparks, Taylor D. and Sanvito, Stefano}, + date = {2024-09-18}, + eprint = {2409.11971}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2409.11971}, + url = {http://arxiv.org/abs/2409.11971}, + urldate = {2024-11-20}, + abstract = {Vector embeddings derived from large language models (LLMs) show promise in capturing latent information from the literature. Interestingly, these can be integrated into material embeddings, potentially useful for data-driven predictions of materials properties. We investigate the extent to which LLM-derived vectors capture the desired information and their potential to provide insights into material properties without additional training. Our findings indicate that, although LLMs can be used to generate representations reflecting certain property information, extracting the embeddings requires identifying the optimal contextual clues and appropriate comparators. Despite this restriction, it appears that LLMs still have the potential to be useful in generating meaningful materials-science representations.}, + pubstate = {prepublished}, + keywords = {/unread,AML,descriptors,language models,LLM,representation learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Gilligan et al. 
- 2024 - Sampling Latent Material-Property Information From LLM-Derived Embedding Representations.pdf;/Users/wasmer/Zotero/storage/3G649NCN/2409.html} +} + @online{gilmerNeuralMessagePassing2017, title = {Neural {{Message Passing}} for {{Quantum Chemistry}}}, author = {Gilmer, Justin and Schoenholz, Samuel S. and Riley, Patrick F. and Vinyals, Oriol and Dahl, George E.}, @@ -7223,6 +7594,21 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Goscinski et al_2023_A Suite of Generalisable Machine Learning Methods Born out of Chemistry and.pdf;/Users/wasmer/Zotero/storage/IAJ8UA3M/v2.html} } +@online{gouldTransferableDiversityDatadriven2024, + title = {Transferable Diversity – a Data-Driven Representation of Chemical Space}, + author = {Gould, Tim and Chang, Bun and Dale, Stephen and Vuckovic, Stefan}, + date = {2024-04-29}, + eprinttype = {ChemRxiv}, + doi = {10.26434/chemrxiv-2023-5075x-v3}, + url = {https://chemrxiv.org/engage/chemrxiv/article-details/662f79ac21291e5d1d023431}, + urldate = {2024-10-22}, + abstract = {Transferability, especially in the context of model generalization, is a paradigm of all scientific disciplines. However, the rapid advancement of machine learned model development threatens this paradigm, as it can be difficult to understand how transferability is embedded (or missed) in complex models. While transferability in general chemistry machine learning should benefit from diverse training data, a rigorous understanding of transferability together with its interplay with chemical representation remains an open problem. We introduce a transferability framework and apply it to a controllable data-driven model for developing density functional approximations (DFAs), an indispensable tool in everyday chemistry research. We reveal that human intuition introduces chemical biases that can hamper the transferability of data-driven DFAs, and we identify strategies for their elimination. 
We then show that uncritical use of large training sets can actually hinder the transferability of DFAs, in contradiction to typical “more is more” expectations. Finally, our transferability framework yields transferable diversity, a cornerstone principle for data curation for developing general-purpose machine learning models in chemistry.},
and Schroeter, Darrell F.}, + date = {2018-08-16}, + edition = {3}, + publisher = {Cambridge University Press}, + doi = {10.1017/9781316995433}, + url = {https://www.cambridge.org/highereducation/product/9781316995433/book}, + urldate = {2025-01-10}, + abstract = {Changes and additions to the new edition of this classic textbook include a new chapter on symmetries, new problems and examples, improved explanations, more numerical problems to be worked on a computer, new applications to solid state physics, and consolidated treatment of time-dependent potentials.}, + isbn = {978-1-316-99543-3 978-1-107-18963-8}, + keywords = {/unread} +} + +@online{grisafiAcceleratingQMMM2024, + title = {Accelerating {{QM}}/{{MM}} Simulations of Electrochemical Interfaces through Machine Learning of Electronic Charge Densities}, + author = {Grisafi, Andrea and Salanne, Mathieu}, + date = {2024-05-12}, + eprint = {2405.07370}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2405.07370}, + url = {http://arxiv.org/abs/2405.07370}, + urldate = {2024-10-17}, + abstract = {A crucial aspect in the simulation of electrochemical interfaces consists in treating the distribution of electronic charge of electrode materials that are put in contact with an electrolyte solution. Recently, it has been shown how a machine-learning method that specifically targets the electronic charge density, also known as SALTED, can be used to predict the long-range response of metal electrodes in model electrochemical cells. In this work, we provide a full integration of SALTED with MetalWalls, a program for performing classical simulations of electrochemical systems. We do so by deriving a spherical harmonics extension of the Ewald summation method, which allows us to efficiently compute the electric field originated by the predicted electrode charge distribution. 
We show how to use this method to drive the molecular dynamics of an aqueous electrolyte solution under the quantum electric field of a gold electrode, which is matched to the accuracy of density-functional theory. Notably, we find that the resulting atomic forces present a small error of the order of 1 meV/{\AA}, demonstrating the great effectiveness of adopting an electron-density path in predicting the electrostatics of the system. Upon running the data-driven dynamics over about 3 ns, we observe qualitative differences in the interfacial distribution of the electrolyte with respect to the results of a classical simulation. By greatly accelerating quantum-mechanics/molecular-mechanics approaches applied to electrochemical systems, our method opens the door to nanoseconds timescales in the accurate atomistic description of the electrical double layer.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Zotero/storage/4W994GJW/MM simulations of electrochemical interfaces through machine learning of electronic.pdf;/Users/wasmer/Zotero/storage/9FSSCQKZ/2405.html} +} + + @article{grisafiElectronicStructurePropertiesAtomCentered2022, + title = {Electronic-{{Structure Properties}} from {{Atom-Centered Predictions}} of the {{Electron Density}}}, + author = {Grisafi, Andrea and Lewis, Alan M. 
and Rossi, Mariana and Ceriotti, Michele}, @@ -7601,7 +8016,7 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, } @inproceedings{hammermeshGroupTheoryIts1963, - title = {\emph{Group }{{\emph{Theory}}}\emph{ and }{{\emph{Its Application}}}\emph{ to }{{\emph{Physical Problems}}}}, + title = {\mkbibemph{Group }{{\mkbibemph{Theory}}}\mkbibemph{ and }{{\mkbibemph{Its Application}}}\mkbibemph{ to }{{\mkbibemph{Physical Problems}}}}, booktitle = {Physics {{Today}}}, author = {Hammermesh, Morton and Flammer, Carson}, date = {1963-02-01}, @@ -8337,6 +8752,21 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Zotero/storage/62QDRCHY/3609779.html} } +@online{hofgardRelaxedEquivariantGraph2024, + title = {Relaxed {{Equivariant Graph Neural Networks}}}, + author = {Hofgard, Elyssa and Wang, Rui and Walters, Robin and Smidt, Tess}, + date = {2024-07-30}, + eprint = {2407.20471}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2407.20471}, + url = {http://arxiv.org/abs/2407.20471}, + urldate = {2024-10-17}, + abstract = {3D Euclidean symmetry equivariant neural networks have demonstrated notable success in modeling complex physical systems. We introduce a framework for relaxed \$E(3)\$ graph equivariant neural networks that can learn and represent symmetry breaking within continuous groups. Building on the existing e3nn framework, we propose the use of relaxed weights to allow for controlled symmetry breaking. We show empirically that these relaxed weights learn the correct amount of symmetry breaking.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Hofgard et al. 
- 2024 - Relaxed Equivariant Graph Neural Networks.pdf;/Users/wasmer/Zotero/storage/94BF64ZA/2407.html} +} + @article{hohenbergInhomogeneousElectronGas1964, title = {Inhomogeneous {{Electron Gas}}}, author = {Hohenberg, P.}, @@ -8552,6 +8982,22 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Hou et al_2024_Physics-informed active learning for accelerating quantum chemical simulations.pdf;/Users/wasmer/Zotero/storage/BSLJJWPZ/2404.html} } +@online{houUnsupervisedLearningIndividual2024, + title = {Unsupervised {{Learning}} of {{Individual Kohn-Sham States}}: {{Interpretable Representations}} and {{Consequences}} for {{Downstream Predictions}} of {{Many-Body Effects}}}, + shorttitle = {Unsupervised {{Learning}} of {{Individual Kohn-Sham States}}}, + author = {Hou, Bowen and Wu, Jinyuan and Qiu, Diana Y.}, + date = {2024-04-22}, + eprint = {2404.14601}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2404.14601}, + url = {http://arxiv.org/abs/2404.14601}, + urldate = {2024-10-17}, + abstract = {Representation learning for the electronic structure problem is a major challenge of machine learning in computational condensed matter and materials physics. Within quantum mechanical first principles approaches, Kohn-Sham density functional theory (DFT) is the preeminent tool for understanding electronic structure, and the high-dimensional wavefunctions calculated in this approach serve as the building block for downstream calculations of correlated many-body excitations and related physical observables. Here, we use variational autoencoders (VAE) for the unsupervised learning of high-dimensional DFT wavefunctions and show that these wavefunctions lie in a low-dimensional manifold within the latent space. Our model autonomously determines the optimal representation of the electronic structure, avoiding limitations due to manual feature engineering and selection in prior work. 
To demonstrate the utility of the latent space representation of the DFT wavefunction, we use it for the supervised training of neural networks (NN) for downstream prediction of the quasiparticle bandstructures within the GW formalism, which includes many-electron correlations beyond DFT. The GW prediction achieves a low error of 0.11 eV for a combined test set of metals and semiconductors drawn from the Computational 2D Materials Database (C2DB), suggesting that latent space representation captures key physical information from the original data. Finally, we explore the interpretability of the VAE representation and show that the successful representation learning and downstream prediction by our model is derived from the smoothness of the VAE latent space, which also enables the generation of wavefunctions on arbitrary points in latent space. Our work provides a novel and general machine-learning framework for investigating electronic structure and many-body physics.}, + pubstate = {prepublished}, + keywords = {AML,autoencoder,dimensionality reduction,GW approximation,KS-DFT,ML,ML-DFT,ML-ESM,multi-step model,prediction of bandstructure,prediction of wavefunction,unsupervised learning,VAE}, + file = {/Users/wasmer/Nextcloud/Zotero/Hou et al. 
- 2024 - Unsupervised Learning of Individual Kohn-Sham States Interpretable Representations and Consequences.pdf;/Users/wasmer/Zotero/storage/GPD7BRW7/2404.html} +} + @article{huAisNetUniversalInteratomic2023, title = {{{AisNet}}: {{A Universal Interatomic Potential Neural Network}} with {{Encoded Local Environment Features}}}, shorttitle = {{{AisNet}}}, @@ -9013,7 +9459,7 @@ Subject\_term\_id: computational-methods;research-management}, journaltitle = {Computer Physics Communications}, shortjournal = {Computer Physics Communications}, volume = {128}, - number = {1-2}, + number = {1--2}, eprint = {cond-mat/9909130}, eprinttype = {arXiv}, pages = {1--45}, @@ -9123,6 +9569,26 @@ Subject\_term\_id: computational-methods;research-management}, file = {/Users/wasmer/Nextcloud/Zotero/Jacobs et al_2020_The Materials Simulation Toolkit for Machine learning (MAST-ML).pdf;/Users/wasmer/Zotero/storage/5R5YHQE4/S0927025620300355.html} } +@article{jacobssonEfficientParameterisationNoncollinear2022, + title = {Efficient Parameterisation of Non-Collinear Energy Landscapes in Itinerant Magnets}, + author = {Jacobsson, A. and Johansson, G. and Gorbatov, O. I. and Ležaić, M. and Sanyal, B. and Blügel, S. and Etz, C.}, + date = {2022-11-08}, + journaltitle = {Scientific Reports}, + shortjournal = {Sci Rep}, + volume = {12}, + number = {1}, + pages = {18987}, + publisher = {Nature Publishing Group}, + issn = {2045-2322}, + doi = {10.1038/s41598-022-20311-7}, + url = {https://www.nature.com/articles/s41598-022-20311-7}, + urldate = {2024-10-30}, + abstract = {Magnetic exchange interactions determine the magnetic groundstate, as well as magnetic excitations of materials and are thus essential to the emerging and fast evolving fields of spintronics and magnonics. The magnetic force theorem has been used extensively for studying magnetic exchange interactions. 
However, short-ranged interactions in itinerant magnetic systems are poorly described by this method and numerous strategies have been developed over the years to overcome this deficiency. The present study supplies a fully self-consistent method for systematic investigations of exchange interactions beyond the standard Heisenberg model. In order to better describe finite deviations from the magnetic ground state, an extended Heisenberg model, including multi-spin interactions, is suggested. Using cross-validation analysis, we show that this extended Heisenberg model gives a superior description for non-collinear magnetic configurations. This parameterisation method allows us to describe many different itinerant magnetic systems and can be useful for high-throughput calculations.}, + langid = {english}, + keywords = {/unread,CPA,frozen magnon method,FZJ,Heisenberg model,itinerant magnetism,Jij,KKR,magnetic force theorem,magnetism,magnons,non-collinear,PGI,PGI-1,spin spiral,SPRKKR}, + file = {/Users/wasmer/Nextcloud/Zotero/Jacobsson et al. - 2022 - Efficient parameterisation of non-collinear energy landscapes in itinerant magnets.pdf} +} + @article{jacobssonOpenaccessDatabaseAnalysis2022, + title = {An Open-Access Database and Analysis Tool for Perovskite Solar Cells Based on the {{FAIR}} Data Principles}, + author = {Jacobsson, T. Jesper and Hultqvist, Adam and García-Fernández, Alberto and Anand, Aman and Al-Ashouri, Amran and Hagfeldt, Anders and Crovetto, Andrea and Abate, Antonio and Ricciardulli, Antonio Gaetano and Vijayan, Anuja and Kulkarni, Ashish and Anderson, Assaf Y. and Darwich, Barbara Primera and Yang, Bowen and Coles, Brendan L. and Perini, Carlo A. R. and Rehermann, Carolin and Ramirez, Daniel and Fairen-Jimenez, David and Di Girolamo, Diego and Jia, Donglin and Avila, Elena and Juarez-Perez, Emilio J. and Baumann, Fanny and Mathies, Florian and González, G. S. 
Anaya and Boschloo, Gerrit and Nasti, Giuseppe and Paramasivam, Gopinath and Martínez-Denegri, Guillermo and Näsström, Hampus and Michaels, Hannes and Köbler, Hans and Wu, Hua and Benesperi, Iacopo and Dar, M. Ibrahim and Bayrak Pehlivan, Ilknur and Gould, Isaac E. and Vagott, Jacob N. and Dagar, Janardan and Kettle, Jeff and Yang, Jie and Li, Jinzhao and Smith, Joel A. and Pascual, Jorge and Jerónimo-Rendón, Jose J. and Montoya, Juan Felipe and Correa-Baena, Juan-Pablo and Qiu, Junming and Wang, Junxin and Sveinbjörnsson, Kári and Hirselandt, Katrin and Dey, Krishanu and Frohna, Kyle and Mathies, Lena and Castriotta, Luigi A. and Aldamasy, Mahmoud H. and Vasquez-Montoya, Manuel and Ruiz-Preciado, Marco A. and Flatken, Marion A. and Khenkin, Mark V. and Grischek, Max and Kedia, Mayank and Saliba, Michael and Anaya, Miguel and Veldhoen, Misha and Arora, Neha and Shargaieva, Oleksandra and Maus, Oliver and Game, Onkar S. and Yudilevich, Ori and Fassl, Paul and Zhou, Qisen and Betancur, Rafael and Munir, Rahim and Patidar, Rahul and Stranks, Samuel D. and Alam, Shahidul and Kar, Shaoni and Unold, Thomas and Abzieher, Tobias and Edvinsson, Tomas and David, Tudur Wyn and Paetzold, Ulrich W. and Zia, Waqas and Fu, Weifei and Zuo, Weiwei and Schröder, Vincent R. F. and Tress, Wolfgang and Zhang, Xiaoliang and Chiang, Yu-Hsien and Iqbal, Zafar and Xie, Zhiqiang and Unger, Eva}, @@ -9938,6 +10404,24 @@ Subject\_term\_id: quantum-physics;theoretical-physics}, file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al_2021_The chiral Hall effect in canted ferromagnets and antiferromagnets.pdf} }
Res.}, + volume = {3}, + number = {4}, + pages = {043155}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevResearch.3.043155}, + url = {https://link.aps.org/doi/10.1103/PhysRevResearch.3.043155}, + urldate = {2024-10-23}, + abstract = {The transport properties of nontrivial spin textures are coming under closer scrutiny as the amount of experimental data and theoretical simulations is increasing. To extend the commonly accepted yet simplifying and approximate picture of transport effects taking place in systems with spatially varying magnetization, it is important to understand the transport properties of building blocks for spin textures—the homochiral spin-spiral states. In this work, by referring to phenomenological symmetry arguments based on the gradient expansion, and explicit calculations within the Kubo framework, we study the transport properties of various types of spin-spirals in a two-dimensional model with strong spin-orbit interaction. In particular, we focus on the contributions to the magnetoconductivity, the planar Hall effect, and the anomalous Hall effect, which are sensitive to the sense of chirality of the spiral states. We analyze the emergence, symmetry, and microscopic properties of the resulting chiral magnetoconductivity, chiral planar Hall effect, and chiral Hall effect in terms of spin-spiral propagation direction, cone angle, spiral pitch, and disorder strength. Our findings suggest that the presence of spin-spiral states in magnets can be readily detected in various types of magnetotransport setups. Moreover, the sizable magnitude of chiral contributions to the conductivity of skyrmions estimated from homochiral spirals implies that chiral, as opposed to topological, magnetotransport can play a prominent role for the detection of nontrivial spin textures.}, + keywords = {/unread,FZJ,magnetism,PGI,PGI-1/IAS-1,spin spiral,TB}, + file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al. 
- 2021 - Chiral response of spin-spiral states as the origin of chiral transport fingerprints of spin texture.pdf;/Users/wasmer/Zotero/storage/R56JD22C/PhysRevResearch.3.html} +} + @online{kippMachineLearningInspired2024, title = {Machine Learning Inspired Models for {{Hall}} Effects in Non-Collinear Magnets}, author = {Kipp, Jonathan and Lux, Fabian R. and Pürling, Thorben and Morrison, Abigail and Blügel, Stefan and Pinna, Daniele and Mokrousov, Yuriy}, @@ -9948,10 +10432,30 @@ Subject\_term\_id: quantum-physics;theoretical-physics}, abstract = {The anomalous Hall effect has been front and center in solid state research and material science for over a century now, and the complex transport phenomena in nontrivial magnetic textures have gained an increasing amount of attention, both in theoretical and experimental studies. However, a clear path forward to capturing the influence of magnetization dynamics on anomalous Hall effect even in smallest frustrated magnets or spatially extended magnetic textures is still intensively sought after. In this work, we present an expansion of the anomalous Hall tensor into symmetrically invariant objects, encoding the magnetic configuration up to arbitrary power of spin. We show that these symmetric invariants can be utilized in conjunction with advanced regularization techniques in order to build models for the electric transport in magnetic textures which are, on one hand, complete with respect to the point group symmetry of the underlying lattice, and on the other hand, depend on a minimal number of order parameters only. Here, using a four-band tight-binding model on a honeycomb lattice, we demonstrate that the developed method can be used to address the importance and properties of higher-order contributions to transverse transport. 
The efficiency and breadth enabled by this method provides an ideal systematic approach to tackle the inherent complexity of response properties of noncollinear magnets, paving the way to the exploration of electric transport in intrinsically frustrated magnets as well as large-scale magnetic textures.}, langid = {english}, pubstate = {prepublished}, - keywords = {2D,2D material,AML,electric transport,feature selection,group theory,Hall AHE,Hall effect,higher order,honeycomb lattice,invariance,linear regression,magnetic structure,magnetic supperlattice,magnetism,materials,ML,non-collinear,PCA,physics,point group,spin invariant,spin-dependent,spintronics,SVD,symmetrization,symmetry,TB,tensor decomposition,tight binding}, + keywords = {2D,2D material,AML,electric transport,feature selection,FZJ,group theory,Hall AHE,Hall effect,higher order,honeycomb lattice,invariance,linear regression,magnetic structure,magnetic supperlattice,magnetism,materials,ML,non-collinear,PCA,PGI,PGI-1/IAS-1,physics,point group,spin invariant,spin-dependent,spintronics,SVD,symmetrization,symmetry,TB,tensor decomposition,tight binding}, file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al_2024_Machine learning inspired models for Hall effects in non-collinear magnets.pdf} } +@article{kippMachineLearningInspired2024a, + title = {Machine Learning Inspired Models for {{Hall}} Effects in Non-Collinear Magnets}, + author = {Kipp, Jonathan and Lux, Fabian R. and Pürling, Thorben and Morrison, Abigail and Blügel, Stefan and Pinna, Daniele and Mokrousov, Yuriy}, + date = {2024-06}, + journaltitle = {Machine Learning: Science and Technology}, + shortjournal = {Mach. Learn.: Sci. 
Technol.}, + volume = {5}, + number = {2}, + pages = {025060}, + publisher = {IOP Publishing}, + issn = {2632-2153}, + doi = {10.1088/2632-2153/ad51ca}, + url = {https://dx.doi.org/10.1088/2632-2153/ad51ca}, + urldate = {2024-10-23}, + abstract = {The anomalous Hall effect has been front and center in solid state research and material science for over a century now, and the complex transport phenomena in nontrivial magnetic textures have gained an increasing amount of attention, both in theoretical and experimental studies. However, a clear path forward to capturing the influence of magnetization dynamics on anomalous Hall effect even in smallest frustrated magnets or spatially extended magnetic textures is still intensively sought after. In this work, we present an expansion of the anomalous Hall tensor into symmetrically invariant objects, encoding the magnetic configuration up to arbitrary power of spin. We show that these symmetric invariants can be utilized in conjunction with advanced regularization techniques in order to build models for the electric transport in magnetic textures which are, on one hand, complete with respect to the point group symmetry of the underlying lattice, and on the other hand, depend on a minimal number of order parameters only. Here, using a four-band tight-binding model on a honeycomb lattice, we demonstrate that the developed method can be used to address the importance and properties of higher-order contributions to transverse transport. 
The efficiency and breadth enabled by this method provides an ideal systematic approach to tackle the inherent complexity of response properties of noncollinear magnets, paving the way to the exploration of electric transport in intrinsically frustrated magnets as well as large-scale magnetic textures.}, + langid = {english}, + keywords = {2D,2D material,AML,electric transport,feature selection,FZJ,group theory,Hall AHE,Hall effect,higher order,honeycomb lattice,invariance,linear regression,magnetic structure,magnetic supperlattice,magnetism,materials,ML,non-collinear,PCA,PGI,PGI-1/IAS-1,physics,point group,spin invariant,spin-dependent,spintronics,SVD,symmetrization,symmetry,TB,tensor decomposition,tight binding}, + file = {/Users/wasmer/Nextcloud/Zotero/Kipp et al. - 2024 - Machine learning inspired models for Hall effects in non-collinear magnets.pdf} +} + @article{kirkpatrickPushingFrontiersDensity2021, title = {Pushing the Frontiers of Density Functionals by Solving the Fractional Electron Problem}, author = {Kirkpatrick, James and McMorrow, Brendan and Turban, David H. P. and Gaunt, Alexander L. and Spencer, James S. and Matthews, Alexander G. D. G. and Obika, Annette and Thiry, Louis and Fortunato, Meire and Pfau, David and Castellanos, Lara Román and Petersen, Stig and Nelson, Alexander W. R. 
and Kohli, Pushmeet and Mori-Sánchez, Paula and Hassabis, Demis and Cohen, Aron J.}, @@ -10371,6 +10875,21 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Nextcloud/Zotero/Korshunova et al_2021_OpenChem.pdf;/Users/wasmer/Zotero/storage/U5ZHRH93/acs.jcim.html} } +@online{kosmaInitioInvestigationTopological2024, + title = {Ab-Initio Investigation of the Topological {{Hall}} Effect Caused by Magnetic Skyrmions in {{Pd}}/{{Fe}}/{{Ir}}(111)}, + author = {Kosma, Adamantia and Rüßmann, Philipp and Mokrousov, Yuriy and Blügel, Stefan and Mavropoulos, Phivos}, + date = {2024-10-24}, + eprint = {2410.18600}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.18600}, + url = {http://arxiv.org/abs/2410.18600}, + urldate = {2024-10-27}, + abstract = {We present an ab-initio computational analysis of the topological Hall effect arising from stable magnetic skyrmions in the Pd/Fe/Ir(111) film using non-collinear spin density functional calculations within the Korringa-Kohn-Rostoker (KKR) Green function method. The semiclassical Boltzmann transport equation is employed for the resistivity and the Hall angle of the system. We explore the influence of the skyrmion size and the impact of disorder on the topological Hall angle.}, + pubstate = {prepublished}, + keywords = {/unread,DFT,disordered,FZJ,Hall effect,Hall THE,impurity embedding,JuKKR,KKR,PGI,PGI-1/IAS-1,skyrmions,topological}, + file = {/Users/wasmer/Nextcloud/Zotero/Kosma et al. 
- 2024 - Ab-initio investigation of the topological Hall effect caused by magnetic skyrmions in PdFeIr(111).pdf;/Users/wasmer/Zotero/storage/ADE6K6CB/2410.html} +} + @article{kosmaStrongSpinorbitTorque2020, title = {Strong Spin-Orbit Torque Effect on Magnetic Defects Due to Topological Surface State Electrons in {{Bi2Te3}}}, shorttitle = {Strong Spin-Orbit Torque Effect on Magnetic Defects Due to Topological Surface State Electrons In}, @@ -10546,6 +11065,26 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Zotero/storage/9QXF3GUC/Krenn et al. - 2022 - On scientific understanding with artificial intell.pdf} } +@article{krennScientificUnderstandingArtificial2022a, + title = {On Scientific Understanding with Artificial Intelligence}, + author = {Krenn, Mario and Pollice, Robert and Guo, Si Yue and Aldeghi, Matteo and Cervera-Lierta, Alba and Friederich, Pascal and family=Passos~Gomes, given=Gabriel, prefix=dos, useprefix=true and Häse, Florian and Jinich, Adrian and Nigam, AkshatKumar and Yao, Zhenpeng and Aspuru-Guzik, Alán}, + date = {2022-12}, + journaltitle = {Nature Reviews Physics}, + shortjournal = {Nat Rev Phys}, + volume = {4}, + number = {12}, + pages = {761--769}, + publisher = {Nature Publishing Group}, + issn = {2522-5820}, + doi = {10.1038/s42254-022-00518-3}, + url = {https://www.nature.com/articles/s42254-022-00518-3}, + urldate = {2025-01-08}, + abstract = {An oracle that correctly predicts the outcome of every particle physics experiment, the products of every possible chemical reaction or the function of every protein would revolutionize science and technology. However, scientists would not be entirely satisfied because they would want to comprehend how the oracle made these predictions. This is scientific understanding, one of the main aims of science. 
With the increase in the available computational power and advances in artificial intelligence, a natural question arises: how can advanced computational systems, and specifically artificial intelligence, contribute to new scientific understanding or gain it autonomously? Trying to answer this question, we adopted a definition of ‘scientific understanding’ from the philosophy of science that enabled us to overview the scattered literature on the topic and, combined with dozens of anecdotes from scientists, map out three dimensions of computer-assisted scientific understanding. For each dimension, we review the existing state of the art and discuss future developments. We hope that this Perspective will inspire and focus research directions in this multidisciplinary emerging field.}, + langid = {english}, + keywords = {/unread,agent,AI,AI scientist,AI4Science,AML,ML,philosophy of science,scientific understanding}, + file = {/Users/wasmer/Nextcloud/Zotero/Krenn et al. - 2022 - On scientific understanding with artificial intelligence.pdf} +} + @article{krennSelfreferencingEmbeddedStrings2020, title = {Self-Referencing Embedded Strings ({{SELFIES}}): {{A}} 100\% Robust Molecular String Representation}, shorttitle = {Self-Referencing Embedded Strings ({{SELFIES}})}, @@ -11472,6 +12011,24 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Zotero/storage/F46FQTHF/Liechtenstein et al_1987_Local spin density functional approach to the theory of exchange interactions.pdf;/Users/wasmer/Zotero/storage/23L5VB4T/0304885387907219.html} } +@article{liEfficientFirstPrinciples2024, + title = {Efficient First Principles Based Modeling via Machine Learning: From Simple Representations to High Entropy Materials}, + shorttitle = {Efficient First Principles Based Modeling via Machine Learning}, + author = {Li, Kangming and Choudhary, Kamal and DeCost, Brian and Greenwood, Michael and Hattrick-Simpers, Jason}, + date = {2024}, + 
journaltitle = {Journal of Materials Chemistry A}, + volume = {12}, + number = {21}, + pages = {12412--12422}, + publisher = {Royal Society of Chemistry}, + doi = {10.1039/D4TA00982G}, + url = {https://pubs.rsc.org/en/content/articlelanding/2024/ta/d4ta00982g}, + urldate = {2025-01-08}, + langid = {english}, + keywords = {ALIGNN,alloys,AML,descriptor comparison,disordered,high-entropy alloys,JARVIS,ML,model comparison,prediction of formation energy,random forest,with-code,with-data,XGB}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al. - 2024 - Efficient first principles based modeling via machine learning from simple representations to high.pdf} +} + @article{liExploitingRedundancyLarge2023, title = {Exploiting Redundancy in Large Materials Datasets for Efficient Machine Learning with Less Data}, author = {Li, Kangming and Persaud, Daniel and Choudhary, Kamal and DeCost, Brian and Greenwood, Michael and Hattrick-Simpers, Jason}, @@ -11493,6 +12050,41 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Nextcloud/Zotero/Li et al_2023_Exploiting redundancy in large materials datasets for efficient machine.pdf} } +@article{liExploitingRedundancyLarge2023a, + title = {Exploiting Redundancy in Large Materials Datasets for Efficient Machine Learning with Less Data}, + author = {Li, Kangming and Persaud, Daniel and Choudhary, Kamal and DeCost, Brian and Greenwood, Michael and Hattrick-Simpers, Jason}, + date = {2023-11-10}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {14}, + number = {1}, + pages = {7283}, + publisher = {Nature Publishing Group}, + issn = {2041-1723}, + doi = {10.1038/s41467-023-42992-y}, + url = {https://www.nature.com/articles/s41467-023-42992-y}, + urldate = {2025-01-08}, + abstract = {Extensive efforts to gather materials data have largely overlooked potential data redundancy. 
In this study, we present evidence of a significant degree of redundancy across multiple large datasets for various material properties, by revealing that up to 95\% of data can be safely removed from machine learning training with little impact on in-distribution prediction performance. The redundant data is related to over-represented material types and does not mitigate the severe performance degradation on out-of-distribution samples. In addition, we show that uncertainty-based active learning algorithms can construct much smaller but equally informative datasets. We discuss the effectiveness of informative data in improving prediction performance and robustness and provide insights into efficient data acquisition and machine learning training. This work challenges the “bigger is better” mentality and calls for attention to the information richness of materials data rather than a narrow emphasis on data volume.}, + langid = {english}, + keywords = {/unread,active learning,ALIGNN,AML,data redundancy,database analysis,database optimization,GNN,gradient boosting,JARVIS,MatBench,materials,materials database,materials project,ML,model comparison,MP18,MP21,OQMD,out-of-distribution,pruning,random forest,redundancy,with-code,with-data,XGB}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al. 
- 2023 - Exploiting redundancy in large materials datasets for efficient machine learning with less data.pdf} +} + +@article{liHybridLLMGNNIntegratingLarge2024, + title = {Hybrid-{{LLM-GNN}}: Integrating Large Language Models and Graph Neural Networks for Enhanced Materials Property Prediction}, + shorttitle = {Hybrid-{{LLM-GNN}}}, + author = {Li, Youjia and Gupta, Vishu and Talha~Kilic, Muhammed Nur and Choudhary, Kamal and Wines, Daniel and Liao, Wei-keng and Choudhary, Alok and Agrawal, Ankit}, + date = {2024-12-18}, + journaltitle = {Digital Discovery}, + publisher = {Royal Society of Chemistry}, + doi = {10.1039/D4DD00199K}, + url = {https://pubs.rsc.org/en/content/articlelanding/2025/dd/d4dd00199k}, + urldate = {2025-01-08}, + langid = {english}, + keywords = {AML,BERT,ChemNLP,GNN,GNN LLM integration,LLM,MatBERT,ML,transfer learning,VASP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al. - 2025 - Hybrid-LLM-GNN integrating large language models and graph neural networks for enhanced materials p.pdf} +} + @article{liInitioArtificialIntelligence2023, title = {Ab Initio Artificial Intelligence: {{Future}} Research of {{Materials Genome Initiative}}}, shorttitle = {Ab Initio Artificial Intelligence}, @@ -11649,6 +12241,22 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Zotero/storage/9Z7XGB5F/Lin et al. 
- 2019 - Numerical methods for Kohn–Sham density functional.pdf} } +@online{liProbingOutofdistributionGeneralization2024, + title = {Probing Out-of-Distribution Generalization in Machine Learning for Materials}, + author = {Li, Kangming and Rubungo, Andre Niyongabo and Lei, Xiangyun and Persaud, Daniel and Choudhary, Kamal and DeCost, Brian and Dieng, Adji Bousso and Hattrick-Simpers, Jason}, + date = {2024-06-10}, + eprint = {2406.06489}, + eprinttype = {arXiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2406.06489}, + url = {http://arxiv.org/abs/2406.06489}, + urldate = {2025-01-08}, + abstract = {Scientific machine learning (ML) endeavors to develop generalizable models with broad applicability. However, the assessment of generalizability is often based on heuristics. Here, we demonstrate in the materials science setting that heuristics based evaluations lead to substantially biased conclusions of ML generalizability and benefits of neural scaling. We evaluate generalization performance in over 700 out-of-distribution tasks that features new chemistry or structural symmetry not present in the training data. Surprisingly, good performance is found in most tasks and across various ML models including simple boosted trees. Analysis of the materials representation space reveals that most tasks contain test data that lie in regions well covered by training data, while poorly-performing tasks contain mainly test data outside the training domain. For the latter case, increasing training set size or training time has marginal or even adverse effects on the generalization performance, contrary to what the neural scaling paradigm assumes. Our findings show that most heuristically-defined out-of-distribution tests are not genuinely difficult and evaluate only the ability to interpolate. 
Evaluating on such tasks rather than the truly challenging ones can lead to an overestimation of generalizability and benefits of scaling.}, + pubstate = {prepublished}, + keywords = {ALIGNN,AML,generalization,GNN,gradient boosting,JARVIS,LLM,materials project,ML,model comparison,OQMD,out-of-distribution,random forest,SHAP,UMAP,with-data,XGB}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al. - 2024 - Probing out-of-distribution generalization in machine learning for materials.pdf;/Users/wasmer/Zotero/storage/YQY4SS8U/2406.html} +} + @online{liptonTroublingTrendsMachine2018, title = {Troubling {{Trends}} in {{Machine Learning Scholarship}}}, author = {Lipton, Zachary C. and Steinhardt, Jacob}, @@ -11665,6 +12273,26 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Nextcloud/Zotero/Lipton_Steinhardt_2018_Troubling Trends in Machine Learning Scholarship.pdf;/Users/wasmer/Zotero/storage/HK89ZR8C/1807.html} } +@article{liReproducibilityStudyAtomistic2024, + title = {A Reproducibility Study of Atomistic Line Graph Neural Networks for Materials Property Prediction}, + author = {Li, Kangming and DeCost, Brian and Choudhary, Kamal and Hattrick-Simpers, Jason}, + date = {2024-06-12}, + journaltitle = {Digital Discovery}, + shortjournal = {Digital Discovery}, + volume = {3}, + number = {6}, + pages = {1123--1129}, + publisher = {RSC}, + issn = {2635-098X}, + doi = {10.1039/D4DD00064A}, + url = {https://pubs.rsc.org/en/content/articlelanding/2024/dd/d4dd00064a}, + urldate = {2025-01-08}, + abstract = {Use of machine learning has been increasingly popular in materials science as data-driven materials discovery is becoming the new paradigm. Reproducibility of findings is paramount for promoting transparency and accountability in research and building trust in the scientific community. Here we conduct a reproducibility analysis of the work by K. Choudhary and B. Brian [npj Comput. 
Mater., 7, 2021, 185], in which a new graph neural network architecture was developed with improved performance on multiple atomistic prediction tasks. We examine the reproducibility for the model performance on 29 regression tasks and for an ablation analysis of the graph neural network layers. We find that the reproduced results generally exhibit a good quantitative agreement with the initial study, despite minor disparities in model performance and training efficiency that may be resulting from factors such as hardware difference and stochasticity involved in model training and data splits. The ease of conducting these reproducibility experiments confirms the great benefits of open data and code practices to which the initial work adhered. We also discuss some further enhancements in reproducible practices such as code and data archiving and providing data identifiers used in dataset splits.}, + langid = {english}, + keywords = {ablation study,ALIGNN,AML,GCN,GNN,JARVIS-DFT,ML,reproducibility,reproduction study,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Li et al. 
- 2024 - A reproducibility study of atomistic line graph neural networks for materials property prediction.pdf} +} + @online{liSelfconsistentGradientlikeEigen2022, title = {Self-Consistent {{Gradient-like Eigen Decomposition}} in {{Solving Schr}}\textbackslash "odinger {{Equations}}}, author = {Li, Xihan and Chen, Xiang and Tutunov, Rasul and Bou-Ammar, Haitham and Wang, Lei and Wang, Jun}, @@ -12204,6 +12832,21 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Nextcloud/Zotero/Lupo Pasini et al_2021_A scalable algorithm for the optimization of neural network architectures.pdf} } +@thesis{luxPhaseSpaceApproach2022, + title = {A Phase Space Approach to the Electronic Properties of Noncollinear Magnets}, + author = {Lux, Fabian Rudolf and Melcher, Christof and Blügel, Stefan}, + date = {2022}, + number = {RWTH-2022-08974}, + institution = {RWTH Aachen University}, + doi = {10.18154/RWTH-2022-08974}, + url = {https://publications.rwth-aachen.de/record/853620}, + urldate = {2024-11-13}, + abstract = {Der steigende Bedarf an datenbasierter Technologie in Verbindung mit den Herausforderungen des Klimawandels macht es erstrebenswert, eine neue Art von Computerspeicher mit einer sehr hohen Informationsdichte bei sehr geringem Energieverbrauch zu finden. Das Gebiet des nichtkollinearen Magnetismus hat sich als Quelle von Ideen erwiesen, die diese beiden Punkte in Angriff nehmen. Gleichmäßig variierende Magnetisierungstexturen wie Domänenwände, Skyrmionen oder Hopfionen wurden hierbei als eine neue Art zukünftiger Informationsträger vorgeschlagen. Das Verständnis ihrer elektronischen Eigenschaften ist von großem Interesse, da dies für ein zuverlässiges Detektionsprotokoll entscheidend ist. 
Diese Dissertation befasst sich mit der Aufgabe, die elektronischen Eigenschaften von gleichmäßig variierenden, nichtkollinearen magnetischen Strukturen zu beschreiben, indem sie sich auf die nichtkommutative Geometrie des quantenmechanischen Phasenraums bezieht. In vielen Fällen liegt die charakteristische Längenskala dieser magnetischen Zustände um mindestens eine Größenordnung über der Skala, die durch die atomaren Abstände des Kristalls festgelegt ist, welcher den magnetischen Zustand beherbergt. Dies stellt eine ernsthafte theoretische Herausforderung dar, da die numerische Berechnung der elektronischen Struktur ausgedehnter Strukturen sehr rechenintensiv ist. Stattdessen wäre es wünschenswert, einen Formalismus zu haben, der die Eigenschaften des nicht-kollinearen Systems aus den Informationen vorhersagen kann, die bereits aus dem kollinear polarisierten magnetischen Zustand zugänglich sind. In dieser Arbeit wird die Phasenraumformulierung der Quantenmechanik verwendet um einen solchen Ansatz zu liefern, welcher die physikalischen Eigenschaften als eine Reihenentwicklung in den Gradienten der zugrundeliegenden magnetischen Struktur ausdrückt. Allgemeine algebraische Eigenschaften dieser Erweiterung werden ebenso diskutiert wie die Auswirkungen kristallographischer Symmetrien. Der Formalismus wird dann angewandt, um die Orbitalmagnetisierung und den anomalen Hall-Effekt von nichtkollinearen Magneten zu diskutieren, wobei der Schwerpunkt auf der ersten nichttrivialen gradienteninduzierten Korrektur liegt. Ein intrinsischer, geometrischer Beitrag zum anomalen Hall-Effekt nichtkollinearer Magnete wird identifiziert indem das Problem in der Sprache der nichtkommutativen Faserbündel formuliert wird. Das zentrale Ergebnis zeigt, dass die Gradientenentwicklung der Dichtematrix und der Berry-Krümmung durch eine Konstruktion aus der Stringtheorie bestimmt wird, die als Seiberg-Witten-Abbildung bekannt ist. 
Ursprünglich wurde diese Abbildung im effektiven Niederenergieverhalten von D-Branen entdeckt und bildet nun die geometrische Grundlage für Gradientenentwicklungstechniken in nichtkollinearen Magneten. An increasing demand for data-driven technology combined with the challenges imposed by climate change makes it desirable to identify a new type of computer memory storage with a very high information density at very low energy consumption. The field of noncollinear magnetism has emerged as a source of ideas that address these two points. Smoothly varying magnetization textures such as domain walls, skyrmions, or hopfions have been proposed as a new type of future information carrier. Understanding their electronic properties is a major field of interest since it is crucial for a reliable detection protocol. This thesis treats the task of describing the electronic properties of smoothly varying, noncollinear magnetic textures by referring to the noncommutative geometry of the quantum-mechanical phase space. In many cases, the characteristic length scale of these magnetic states is at least an order of magnitude above the scale which is set by the spacing of atomic sites which form the crystal that supports the magnetic state. This poses a serious theoretical challenge since the numerical calculation of the electronic structure of large-scale structures is computationally very demanding. Instead, it would be desirable to have a formalism that could predict properties of the noncollinear system from information that is accessible already from the collinear polarized magnetic state. In this thesis, the phase space formulation of quantum mechanics is used to provide such an approach, effectively expanding physical properties in terms of the real space gradients of the underlying magnetic texture. General algebraic properties of this expansion are discussed, as well as the impact of crystallographic symmetries. 
The formalism is then applied to discuss the orbital magnetization and the anomalous Hall effect of noncollinear magnets with a focus on the first nontrivial gradient-induced correction. An intrinsic, geometric contribution to the anomalous Hall effect of noncollinear magnets is identified by formulating the problem in the language of noncommutative fiber bundles. The central finding shows that the gradient expansion of the density matrix and the Berry curvature is governed by a construction from string theory which is known as the Seiberg-Witten map. Originally discovered in the effective low-energy behavior of D-branes, this map now gives a geometrical underpinning to gradient expansion techniques in noncollinear magnets. Lux, Fabian Rudolf; Blügel, Stefan; Melcher, Christof}, + langid = {english}, + keywords = {chiral magnets,chirality,contravariant,covariant,equivariant,FZJ,gauge theory,group theory,Hall AHE,Hall CHE,Hall effect,Magnetic skyrmion,magnetism,non-collinear,orbital magnetism,PGI,PGI-1,representation theory,skyrmions,spin texture,symmetry,tensor decomposition,tensor field,topological}, + file = {/Users/wasmer/Nextcloud/Zotero/Lux et al. 
- 2022 - A phase space approach to the electronic properties of noncollinear magnets.pdf;/Users/wasmer/Zotero/storage/87W5AH3D/853620.html} +} + @article{lvDeepChargeDeep2023, title = {Deep {{Charge}}: {{Deep}} Learning Model of Electron Density from a One-Shot Density Functional Theory Calculation}, shorttitle = {Deep {{Charge}}}, @@ -12951,6 +13594,21 @@ Subject\_term\_id: computational-chemistry;density-functional-theory;method-deve file = {/Users/wasmer/Nextcloud/Zotero/Merkys et al_2017_A posteriori metadata from automated provenance tracking.pdf;/Users/wasmer/Zotero/storage/9ZIMVPJ8/s13321-017-0242-y.html} } +@online{metzSimulatingContinuousspaceSystems2024, + title = {Simulating Continuous-Space Systems with Quantum-Classical Wave Functions}, + author = {Metz, Friederike and Pescia, Gabriel and Carleo, Giuseppe}, + date = {2024-09-10}, + eprint = {2409.06415}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2409.06415}, + url = {http://arxiv.org/abs/2409.06415}, + urldate = {2024-10-17}, + abstract = {Most non-relativistic interacting quantum many-body systems, such as atomic and molecular ensembles or materials, are naturally described in terms of continuous-space Hamiltonians. The simulation of their ground-state properties on digital quantum computers is challenging because current algorithms require discretization, which usually amounts to choosing a finite basis set, inevitably introducing errors. In this work, we propose an alternative, discretization-free approach that combines classical and quantum resources in a global variational ansatz, optimized using the framework of variational Monte Carlo. 
We introduce both purely quantum as well as hybrid quantum-classical ansatze and benchmark them on three paradigmatic continuous-space systems that are either very challenging or beyond the reach of current quantum approaches: the one-dimensional quantum rotor model, a system of Helium-3 particles in one and two dimensions, and the two-dimensional homogeneous electron gas. We embed relevant constraints such as the antisymmetry of fermionic wave functions directly into the ansatz. Many-body correlations are introduced via backflow transformations represented by parameterized quantum circuits. We demonstrate that the accuracy of the simulation can be systematically improved by increasing the number of circuit parameters and study the effects of shot noise. Furthermore, we show that the hybrid ansatz improves the ground-state energies obtained using the purely classical wave function.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Strongly Correlated Electrons,Nuclear Theory,Physics - Computational Physics,Quantum Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Metz et al. 
- 2024 - Simulating continuous-space systems with quantum-classical wave functions.pdf;/Users/wasmer/Zotero/storage/QIVULJHC/2409.html} +} + @online{metzVeLOTrainingVersatile2022, title = {{{VeLO}}: {{Training Versatile Learned Optimizers}} by {{Scaling Up}}}, shorttitle = {{{VeLO}}}, @@ -12986,6 +13644,24 @@ Subject\_term\_id: computational-chemistry;density-functional-theory;method-deve file = {/Users/wasmer/Nextcloud/Zotero/Microsoft Quantum et al_2023_InAs-Al hybrid devices passing the topological gap protocol.pdf;/Users/wasmer/Zotero/storage/R35YJAWP/PhysRevB.107.html} } +@article{minchPredictingMagneticProperties2024, + title = {Predicting Magnetic Properties of van Der {{Waals}} Magnets Using Graph Neural Networks}, + author = {Minch, Peter and Bhattarai, Romakanta and Choudhary, Kamal and Rhone, Trevor David}, + date = {2024-11-04}, + journaltitle = {Physical Review Materials}, + shortjournal = {Phys. Rev. Mater.}, + volume = {8}, + number = {11}, + pages = {114002}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevMaterials.8.114002}, + url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.8.114002}, + urldate = {2024-11-09}, + abstract = {We study two-dimensional (2D) magnetic materials using state-of-the-art machine learning models that use a graph-theory framework. We find that representing materials as graphs allows us to better learn structure-property relationships by leveraging both the chemical properties of the constituent atoms and the connectivity between those atoms. Graph neural network models are capable of predicting global properties of crystal structure (i.e., graphwise properties) and local properties of the constituent atoms (i.e., nodewise properties). We embed physical constraints into our model by simultaneously making predictions of local and global properties. In particular, we use the atomistic line graph neural network (ALIGNN) architecture. 
We train the ALIGNN model on data comprising local and global magnetic moments of 314 2D structures of the form Cr$A^{ii}B^{i}B^{ii}X_6$, based on monolayer Cr$_2$Ge$_2$Te$_6$, calculated from first principles. By learning the relationships between both local and global magnetic properties, we demonstrate an improvement over models that only consider global magnetic properties.}, + keywords = {ALIGNN,AML,GNN,magnetic moment,magnetism,ML,prediction of magnetic moment,vdW,vdW materials,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Minch et al. - 2024 - Predicting magnetic properties of van der Waals magnets using graph neural networks.pdf;/Users/wasmer/Zotero/storage/DT3B7W6L/PhysRevMaterials.8.html} +} +
and Wonanke, Dinga and Pieler, Michael and Schwaller, Philippe and Jablonka, Kevin Maik}, + date = {2024-04-01}, + eprint = {2404.01475}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2404.01475}, + url = {http://arxiv.org/abs/2404.01475}, + urldate = {2024-10-17}, + abstract = {Large language models (LLMs) have gained widespread interest due to their ability to process human language and perform tasks on which they have not been explicitly trained. This is relevant for the chemical sciences, which face the problem of small and diverse datasets that are frequently in the form of text. LLMs have shown promise in addressing these issues and are increasingly being harnessed to predict chemical properties, optimize reactions, and even design and conduct experiments autonomously. However, we still have only a very limited systematic understanding of the chemical reasoning capabilities of LLMs, which would be required to improve models and mitigate potential harms. Here, we introduce "ChemBench," an automated framework designed to rigorously evaluate the chemical knowledge and reasoning abilities of state-of-the-art LLMs against the expertise of human chemists. We curated more than 7,000 question-answer pairs for a wide array of subfields of the chemical sciences, evaluated leading open and closed-source LLMs, and found that the best models outperformed the best human chemists in our study on average. The models, however, struggle with some chemical reasoning tasks that are easy for human experts and provide overconfident, misleading predictions, such as about chemicals' safety profiles. These findings underscore the dual reality that, although LLMs demonstrate remarkable proficiency in chemical tasks, further research is critical to enhancing their safety and utility in chemical sciences. 
Our findings also indicate a need for adaptations to chemistry curricula and highlight the importance of continuing to develop evaluation frameworks to improve safe and useful LLMs.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Artificial Intelligence,Computer Science - Machine Learning,Condensed Matter - Materials Science,Physics - Chemical Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Mirza et al. - 2024 - Are large language models superhuman chemists.pdf;/Users/wasmer/Zotero/storage/CCVBES8B/2404.html} +} + @inproceedings{missierW3CPROVFamily2013, title = {The {{W3C PROV}} Family of Specifications for Modelling Provenance Metadata}, booktitle = {Proceedings of the 16th {{International Conference}} on {{Extending Database Technology}}}, @@ -13052,6 +13743,22 @@ Subject\_term\_id: computational-chemistry;density-functional-theory;method-deve file = {/Users/wasmer/Nextcloud/Zotero/Missier et al_2013_The W3C PROV family of specifications for modelling provenance metadata.pdf} } +@online{mitnikovE3STOOrbitalInspired2024, + title = {{{E3STO}}: {{Orbital Inspired SE}}(3)-{{Equivariant Molecular Representation}} for {{Electron Density Prediction}}}, + shorttitle = {{{E3STO}}}, + author = {Mitnikov, Ilan and Jacobson, Joseph}, + date = {2024-10-08}, + eprint = {2410.06119}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.06119}, + url = {http://arxiv.org/abs/2410.06119}, + urldate = {2024-10-17}, + abstract = {Electron density prediction stands as a cornerstone challenge in molecular systems, pivotal for various applications such as understanding molecular interactions and conducting precise quantum mechanical calculations. However, the scaling of density functional theory (DFT) calculations is prohibitively expensive. Machine learning methods provide an alternative, offering efficiency and accuracy. 
We introduce a novel SE(3)-equivariant architecture, drawing inspiration from Slater-Type Orbitals (STO), to learn representations of molecular electronic structures. Our approach offers an alternative functional form for learned orbital-like molecular representation. We showcase the effectiveness of our method by achieving SOTA prediction accuracy of molecular electron density with 30-70\textbackslash\% improvement over other work on Molecular Dynamics data.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning,Physics - Chemical Physics,Quantitative Biology - Biomolecules}, + file = {/Users/wasmer/Nextcloud/Zotero/Mitnikov and Jacobson - 2024 - E3STO Orbital Inspired SE(3)-Equivariant Molecular Representation for Electron Density Prediction.pdf;/Users/wasmer/Zotero/storage/D2TBREQD/2410.html} +} + @article{mitranGroundStateCharge2021, title = {Ground State Charge Density Prediction in {{C-BN}} Nanoflakes Using Rotation Equivariant Feature-Free Artificial Neural Networks}, author = {Mitran, Tudor Luca and Nemnes, George Alexandru}, @@ -13360,6 +14067,24 @@ Subject\_term\_id: computational-chemistry;density-functional-theory;method-deve file = {/Users/wasmer/Nextcloud/Zotero/Mozumder et al_2024_High-throughput magnetic co-doping and design of exchange interactions in a.pdf;/Users/wasmer/Zotero/storage/GXLBFXQF/2407.html} } +@article{mozumderHighthroughputMagneticCodoping2024a, + title = {High-Throughput Magnetic Co-Doping and Design of Exchange Interactions in Topological Insulators}, + author = {Mozumder, Rubel and Wasmer, Johannes and Antognini Silva, David and Blügel, Stefan and Rüßmann, Philipp}, + date = {2024-10-17}, + journaltitle = {Physical Review Materials}, + shortjournal = {Phys. Rev. 
Mater.}, + volume = {8}, + number = {10}, + pages = {104201}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevMaterials.8.104201}, + url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.8.104201}, + urldate = {2024-10-18}, + abstract = {Using high-throughput automation of ab initio impurity embedding simulations, we create a database of $3d$ and $4d$ transition metal defects embedded into the prototypical topological insulators (TIs) Bi$_2$Te$_3$ and Bi$_2$Se$_3$. We simulate both single impurities as well as impurity dimers at different impurity-impurity distances inside the topological insulator matrix. We extract changes to magnetic moments, analyze the polarizability of nonmagnetic impurity atoms via nearby magnetic impurity atoms and calculate the exchange coupling constants for a Heisenberg Hamiltonian. We uncover chemical trends in the exchange coupling constants and discuss the impurities' abilities with respect to magnetic order in the fields of quantum anomalous Hall insulators and topological quantum computing. In particular, we confirm that co-doping of different magnetic dopants is a viable strategy to engineer the magnetic ground state in magnetic TIs.}, + keywords = {AiiDA,aiida-kkr,Bi2Te3,co-doping,database generation,dataset,defects,exchange interaction,FZJ,Heisenberg model,impurity embedding,Jij,JuDFT,juKKR,KKR,Liechtenstein formula,magnetic topological materials,magnetism,Materials Cloud,PGI,PGI-1/IAS-1,physics,point defects,topological,topological insulator,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Mozumder et al. - 2024 - High-throughput magnetic co-doping and design of exchange interactions in topological insulators_1.pdf;/Users/wasmer/Nextcloud/Zotero/Mozumder et al. 
- 2024 - High-throughput magnetic co-doping and design of exchange interactions in topological insulators.pdf;/Users/wasmer/Zotero/storage/T76GEFD2/PhysRevMaterials.8.html} +} + @online{muckleyInterpretableModelsExtrapolation2022, title = {Interpretable Models for Extrapolation in Scientific Machine Learning}, author = {Muckley, Eric S. and Saal, James E. and Meredig, Bryce and Roper, Christopher S. and Martin, John H.}, @@ -13749,7 +14474,7 @@ Subject\_term\_id: magnetic-properties-and-materials}, doi = {10.1101/2020.05.12.077776}, url = {https://www.biorxiv.org/content/10.1101/2020.05.12.077776v1}, urldate = {2021-05-15}, - abstract = {{$<$}p{$>$}Nonlinear data-visualization methods, such as t-SNE and UMAP, have become staple tools for summarizing the complex transcriptomic landscape of single cells in 2D or 3D. However, existing approaches neglect the local density of data points in the original space, often resulting in misleading visualizations where densely populated subpopulations of cells are given more visual space even if they account for only a small fraction of transcriptional diversity within the dataset. We present den-SNE and densMAP, our density-preserving visualization tools based on t-SNE and UMAP, respectively, and demonstrate their ability to facilitate more accurate visual interpretation of single-cell RNA-seq data. On recently published datasets, our methods newly reveal significant changes in transcriptomic variability within a range of biological processes, including cancer, immune cell specialization in human, and the developmental trajectory of \emph{C. elegans}. Our methods are readily applicable to visualizing high-dimensional data in other scientific domains.{$<$}/p{$>$}}, + abstract = {{$<$}p{$>$}Nonlinear data-visualization methods, such as t-SNE and UMAP, have become staple tools for summarizing the complex transcriptomic landscape of single cells in 2D or 3D. 
However, existing approaches neglect the local density of data points in the original space, often resulting in misleading visualizations where densely populated subpopulations of cells are given more visual space even if they account for only a small fraction of transcriptional diversity within the dataset. We present den-SNE and densMAP, our density-preserving visualization tools based on t-SNE and UMAP, respectively, and demonstrate their ability to facilitate more accurate visual interpretation of single-cell RNA-seq data. On recently published datasets, our methods newly reveal significant changes in transcriptomic variability within a range of biological processes, including cancer, immune cell specialization in human, and the developmental trajectory of \mkbibemph{C. elegans}. Our methods are readily applicable to visualizing high-dimensional data in other scientific domains.{$<$}/p{$>$}}, langid = {english}, keywords = {den-SNE,density-preserving,densMAP,dimensionality reduction,library,t-SNE,UMAP,unsupervised learning,visualization,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Narayan et al_2020_Density-Preserving Data Visualization Unveils Dynamic Patterns of Single-Cell.pdf;/Users/wasmer/Zotero/storage/6QBY65KW/2020.05.12.html} @@ -13949,6 +14674,21 @@ Subject\_term\_id: magnetic-properties-and-materials}, file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_Rohskopf_2023_Proper orthogonal descriptors for efficient and accurate interatomic potentials2.pdf;/Users/wasmer/Zotero/storage/PYJFVV5U/S0021999123001250.html} } +@online{niblettTransferabilityDatasetsMachineLearning2024, + title = {Transferability of Datasets between {{Machine-Learning Interaction Potentials}}}, + author = {Niblett, Samuel P. and Kourtis, Panagiotis and Magdău, Ioan-Bogdan and Grey, Clare P. 
and Csányi, Gábor}, + date = {2024-09-09}, + eprint = {2409.05590}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2409.05590}, + url = {http://arxiv.org/abs/2409.05590}, + urldate = {2024-11-20}, + abstract = {With the emergence of Foundational Machine Learning Interatomic Potential (FMLIP) models trained on extensive datasets, transferring data between different ML architectures has become increasingly important. In this work, we examine the extent to which training data optimised for one machine-learning forcefield algorithm may be re-used to train different models, aiming to accelerate FMLIP fine-tuning and to reduce the need for costly iterative training. As a test case, we train models of an organic liquid mixture that is commonly used as a solvent in rechargeable battery electrolytes, making it an important target for reactive MLIP development. We assess model performance by analysing the properties of molecular dynamics trajectories, showing that this is a more stringent test than comparing prediction errors for fixed datasets. We consider several types of training data, and several popular MLIPs - notably the recent MACE architecture, a message-passing neural network designed for high efficiency and smoothness. We demonstrate that simple training sets constructed without any ab initio dynamics are sufficient to produce stable models of molecular liquids. For simple neural-network architectures, further iterative training is required to capture thermodynamic and kinetic properties correctly, but MACE performs well with extremely limited datsets. We find that configurations designed by human intuition to correct systematic model deficiencies transfer effectively between algorithms, but active-learned data that are generated by one MLIP do not typically benefit a different algorithm. Finally, we show that any training data which improve model performance also improve its ability to generalise to similar unseen molecules. 
This suggests that trajectory failure modes are connected with chemical structure rather than being entirely system-specific.}, + pubstate = {prepublished}, + keywords = {active learning,AML,DeepMD,MACE,ML,MLP,transferability}, + file = {/Users/wasmer/Nextcloud/Zotero/Niblett et al. - 2024 - Transferability of datasets between Machine-Learning Interaction Potentials.pdf;/Users/wasmer/Zotero/storage/7N4C9H2Q/2409.html} +} + @online{nigamCompletenessAtomicStructure2023, title = {Completeness of {{Atomic Structure Representations}}}, author = {Nigam, Jigyasa and Pozdnyakov, Sergey N. and Huguenin-Dumittan, Kevin K. and Ceriotti, Michele}, @@ -14345,6 +15085,21 @@ Subject\_term\_id: magnetic-properties-and-materials}, file = {/Users/wasmer/Nextcloud/Zotero/Paleico_Behler_2021_A bin and hash method for analyzing reference data and descriptors in machine.pdf} } +@online{panAtomicClusterExpansion2024, + title = {Atomic Cluster Expansion Interatomic Potential for Defects and Thermodynamics of {{Cu-W}} System}, + author = {Pan, Jiahao and Cheng, Huiqun and Yan, Gaosheng and Zhang, Lei and Yu, Wenshan and Shen, Shengping}, + date = {2024-07-01}, + eprint = {2407.00946}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2407.00946}, + url = {http://arxiv.org/abs/2407.00946}, + urldate = {2024-10-17}, + abstract = {The unique properties exhibited in immiscible metals, such as excellent strength, hardness, and radiation-damage tolerance, have stimulated the interest of many researchers. As a typical immiscible metal system, the Cu-W nano-multilayers combine the plasticity of copper and the strength of tungsten, making it a suitable candidate for applications in aerospace, nuclear fusion engineering, and electronic packaging etc. To understand the atomistic origin of the defects and thermodynamics of the Cu-W immiscible system, we have developed an accurate machine learning interatomic potential (ML-IAP) for Cu-W based on the atomic cluster expansion (ACE) method. 
The Cu-W ACE potential can faithfully reproduce the fundamental properties of Cu and W predicted by density functional theory (DFT). Moreover, the thermodynamical properties, such as the melting point, coefficient of thermal expansion, diffusion coefficient, and equation of the state curve of the Cu-W solid solution, are calculated and compared against DFT and experiments. Monte Carlo Molecular Dynamics (MC-MD) simulations performed with the Cu-W ACE potential predict the experimentally observed phase separation and uphill diffusion phenomena. Our findings not only provide an accurate ACE potential for describing the Cu-W immiscible system, but also shed light on understanding the atomistic mechanism during the Cu-W nano-multilayers formation process.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Nextcloud/Zotero/Pan et al. - 2024 - Atomic cluster expansion interatomic potential for defects and thermodynamics of Cu-W system.pdf;/Users/wasmer/Zotero/storage/PEH6D6HD/2407.html} +} + @article{pantDFTaidedMachineLearningbased2023, title = {{{DFT-aided}} Machine Learning-Based Discovery of Magnetism in {{Fe-based}} Bimetallic Chalcogenides}, author = {Pant, Dharmendra and Pokharel, Suresh and Mandal, Subhasish and Kc, Dukka B. 
and Pati, Ranjit}, @@ -14609,6 +15364,21 @@ Subject\_term\_id: magnetic-properties-and-materials}, file = {/Users/wasmer/Zotero/storage/3R7VPZGJ/google-we-have-no-moat-and-neither.html} } +@online{pathrudkarElectronicStructurePrediction2024, + title = {Electronic Structure Prediction of Medium and High Entropy Alloys across Composition Space}, + author = {Pathrudkar, Shashank and Taylor, Stephanie and Keripale, Abhishek and Gangan, Abhijeet Sadashiv and Thiagarajan, Ponkrshnan and Agarwal, Shivang and Marian, Jaime and Ghosh, Susanta and Banerjee, Amartya S.}, + date = {2024-10-10}, + eprint = {2410.08294}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2410.08294}, + url = {http://arxiv.org/abs/2410.08294}, + urldate = {2024-10-17}, + abstract = {We propose machine learning (ML) models to predict the electron density -- the fundamental unknown of a material's ground state -- across the composition space of concentrated alloys. From this, other physical properties can be inferred, enabling accelerated exploration. A significant challenge is that the number of sampled compositions and descriptors required to accurately predict fields like the electron density increases rapidly with species. To address this, we employ Bayesian Active Learning (AL), which minimizes training data requirements by leveraging uncertainty quantification capabilities of Bayesian Neural Networks. Compared to strategic tessellation of the composition space, Bayesian-AL reduces the number of training data points by a factor of 2.5 for ternary (SiGeSn) and 1.7 for quaternary (CrFeCoNi) systems. We also introduce easy-to-optimize, body-attached-frame descriptors, which respect physical symmetries and maintain approximately the same descriptor-vector size as alloy elements increase. 
Our ML models demonstrate high accuracy and generalizability in predicting both electron density and energy across composition space.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Disordered Systems and Neural Networks,Condensed Matter - Materials Science,Physics - Computational Physics,Quantum Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Pathrudkar et al. - 2024 - Electronic structure prediction of medium and high entropy alloys across composition space.pdf;/Users/wasmer/Zotero/storage/LJZQTGZ6/2410.html} +} + @article{pathrudkarMachineLearningBased2022, title = {Machine Learning Based Prediction of the Electronic Structure of Quasi-One-Dimensional Materials under Strain}, author = {Pathrudkar, Shashank and Yu, Hsuan Ming and Ghosh, Susanta and Banerjee, Amartya S.}, @@ -15327,6 +16097,24 @@ Subject\_term\_id: magnetic-properties-and-materials}, file = {/Users/wasmer/Nextcloud/Zotero/Radhakrishnan et al_2022_Transfer Learning with Kernel Methods.pdf;/Users/wasmer/Zotero/storage/WGXWDYIS/2211.html} } +@article{rahmanAcceleratingDefectPredictions2024, + title = {Accelerating Defect Predictions in Semiconductors Using Graph Neural Networks}, + author = {Rahman, Md Habibur and Gollapalli, Prince and Manganaris, Panayotis and Yadav, Satyesh Kumar and Pilania, Ghanshyam and DeCost, Brian and Choudhary, Kamal and Mannodi-Kanakkithodi, Arun}, + date = {2024-03-27}, + journaltitle = {APL Machine Learning}, + shortjournal = {APL Machine Learning}, + volume = {2}, + number = {1}, + pages = {016122}, + issn = {2770-9019}, + doi = {10.1063/5.0176333}, + url = {https://doi.org/10.1063/5.0176333}, + urldate = {2025-01-08}, + abstract = {First-principles computations reliably predict the energetics of point defects in semiconductors but are constrained by the expense of using large supercells and advanced levels of theory. 
Machine learning models trained on computational data, especially ones that sufficiently encode defect coordination environments, can be used to accelerate defect predictions. Here, we develop a framework for the prediction and screening of native defects and functional impurities in a chemical space of group IV, III–V, and II–VI zinc blende semiconductors, powered by crystal Graph-based Neural Networks (GNNs) trained on high-throughput density functional theory (DFT) data. Using an innovative approach of sampling partially optimized defect configurations from DFT calculations, we generate one of the largest computational defect datasets to date, containing many types of vacancies, self-interstitials, anti-site substitutions, impurity interstitials and substitutions, as well as some defect complexes. We applied three types of established GNN techniques, namely crystal graph convolutional neural network, materials graph network, and Atomistic Line Graph Neural Network (ALIGNN), to rigorously train models for predicting defect formation energy (DFE) in multiple charge states and chemical potential conditions. We find that ALIGNN yields the best DFE predictions with root mean square errors around 0.3~eV, which represents a prediction accuracy of 98\% given the range of values within the dataset, improving significantly on the state-of-the-art. We further show that GNN-based defective structure optimization can take us close to DFT-optimized geometries at a fraction of the cost of full DFT. 
The current models are based on the semi-local generalized gradient approximation-Perdew–Burke–Ernzerhof (PBE) functional but are highly promising because of the correlation of computed energetics and defect levels with higher levels of theory and experimental data, the accuracy and necessity of discovering novel metastable and low energy defect structures at the PBE level of theory before advanced methods could be applied, and the ability to train multi-fidelity models in the future with new data from non-local functionals. The DFT-GNN models enable prediction and screening across thousands of hypothetical defects based on both unoptimized and partially optimized defective structures, helping identify electronically active defects in technologically important semiconductors.}, + keywords = {ALIGNN,AML,CGCNN,defects,GNN,HSE,MEGNet,ML,model comparison,PBE,prediction of formation energy,Semiconductors,SOC,structure relaxation,VASP,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Rahman et al. - 2024 - Accelerating defect predictions in semiconductors using graph neural networks.pdf;/Users/wasmer/Zotero/storage/6B3GZI82/3279661.html} +} + @online{raissiPhysicsInformedDeep2017, title = {Physics {{Informed Deep Learning}} ({{Part I}}): {{Data-driven Solutions}} of {{Nonlinear Partial Differential Equations}}}, shorttitle = {Physics {{Informed Deep Learning}} ({{Part I}})}, @@ -15696,7 +16484,7 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, url = {https://doi.org/10.1021/acs.jcim.3c01391}, urldate = {2024-03-31}, abstract = {In the search for novel intermetallic ternary alloys, much of the effort goes into performing a large number of ab initio calculations covering a wide range of compositions and structures. These are essential to building a reliable convex hull diagram. 
While density functional theory (DFT) provides accurate predictions for many systems, its computational overheads set a throughput limit on the number of hypothetical phases that can be probed. Here, we demonstrate how an ensemble of machine-learning (ML) spectral neighbor-analysis potentials (SNAPs) can be integrated into a workflow for the construction of accurate ternary convex hull diagrams, highlighting regions that are fertile for materials discovery. Our workflow relies on using available binary-alloy data both to train the SNAP models and to create prototypes for ternary phases. From the prototype structures, all unique ternary decorations are created and used to form a pool of candidate compounds. The SNAPs ensemble is then used to prerelax the structures and screen the most favorable prototypes before using DFT to build the final phase diagram. As constructed, the proposed workflow relies on no extra first-principles data to train the ML surrogate model and yields a DFT-level accurate convex hull. 
We demonstrate its efficacy by investigating the Cu–Ag–Au and Mo–Ta–W ternary systems.}, - keywords = {/unread,\_tablet,AFLOWLIB,alloys,AML,ase,bispectrum,convex hull,ensemble learning,Jacobi-Legendre,JLCDM,LAMMPS,ML,ML-DFT,ML-ESM,MLP,phase diagram,pymatgen,scikit-learn,SNAP,structure relaxation,surrogate model,ternary systems,with-data}, + keywords = {\_tablet,AFLOWLIB,alloys,AML,ase,bispectrum,convex hull,ensemble learning,Jacobi-Legendre,JLCDM,LAMMPS,ML,ML-DFT,ML-ESM,MLP,phase diagram,pymatgen,scikit-learn,SNAP,structure relaxation,surrogate model,ternary systems,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Rossignol et al_2024_Machine-Learning-Assisted Construction of Ternary Convex Hull Diagrams.pdf} } @@ -15721,6 +16509,19 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, file = {/Users/wasmer/Nextcloud/Zotero/Ross et al_2022_Large-scale chemical language representations capture molecular structure and.pdf} } +@book{rougierScientificVisualizationPython2021, + title = {Scientific {{Visualization}}: {{Python}} + {{Matplotlib}}}, + shorttitle = {Scientific {{Visualization}}}, + author = {Rougier, Nicolas P.}, + date = {2021-11-15}, + url = {https://inria.hal.science/hal-03427242}, + urldate = {2024-10-25}, + abstract = {The Python scientific visualization landscape is huge. It is composed of a myriad of tools, ranging from the most versatile and widely used down to the more specialised and confidential. Some of these tools are community based while others are developed by companies. Some are made specifically for the web, others are for the desktop only, some deal with 3D and large data, while others target flawless 2D rendering. In this landscape, Matplotlib has a very special place. It is a versatile and powerful library that allows you to design very high quality figures, suitable for scientific publishing. 
It also offers a simple and intuitive interface as well as an object oriented architecture that allows you to tweak anything within a figure. Finally, it can be used as a regular graphic library in order to design non-scientific figures. This book is organized into four parts. The first part considers the fundamental principles of the Matplotlib library. This includes reviewing the different parts that constitute a figure, the different coordinate systems, the available scales and projections, and we’ll also introduce a few concepts related to typography and colors. The second part is dedicated to the actual design of a figure. After introducing some simple rules for generating better figures, we’ll then go on to explain the Matplotlib defaults and styling system before diving on into figure layout organization. We’ll then explore the different types of plot available and see how a figure can be ornamented with different elements. The third part is dedicated to more advanced concepts, namely 3D figures, optimization \& animation. 
The fourth and final part is a collection of showcases.}, + langid = {english}, + keywords = {/unread,3D,advice,best practices,matplotlib,numerical methods,publishing,Python,scientific computing,scientific programming,scientific visualization,scientific writing,visualization,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Rougier - 2021 - Scientific Visualization Python + Matplotlib.pdf} +} + @online{rubungoLLMPropPredictingPhysical2023, title = {{{LLM-Prop}}: {{Predicting Physical And Electronic Properties Of Crystalline Solids From Their Text Descriptions}}}, shorttitle = {{{LLM-Prop}}}, @@ -16399,6 +17200,22 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, file = {/Users/wasmer/Nextcloud/Zotero/Scherbela et al_2022_Solving the electronic Schrödinger equation for multiple nuclear geometries.pdf;/Users/wasmer/Zotero/storage/JHT752NC/s43588-022-00228-x.html} } +@online{schilling-wilhelmiTextInsightLarge2024, + title = {From {{Text}} to {{Insight}}: {{Large Language Models}} for {{Materials Science Data Extraction}}}, + shorttitle = {From {{Text}} to {{Insight}}}, + author = {Schilling-Wilhelmi, Mara and Ríos-García, Martiño and Shabih, Sherjeel and Gil, María Victoria and Miret, Santiago and Koch, Christoph T. and Márquez, José A. and Jablonka, Kevin Maik}, + date = {2024-07-23}, + eprint = {2407.16867}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2407.16867}, + url = {http://arxiv.org/abs/2407.16867}, + urldate = {2024-10-17}, + abstract = {The vast majority of materials science knowledge exists in unstructured natural language, yet structured data is crucial for innovative and systematic materials design. Traditionally, the field has relied on manual curation and partial automation for data extraction for specific use cases. The advent of large language models (LLMs) represents a significant shift, potentially enabling efficient extraction of structured, actionable data from unstructured text by non-experts. 
While applying LLMs to materials science data extraction presents unique challenges, domain knowledge offers opportunities to guide and validate LLM outputs. This review provides a comprehensive overview of LLM-based structured data extraction in materials science, synthesizing current knowledge and outlining future directions. We address the lack of standardized guidelines and present frameworks for leveraging the synergy between LLMs and materials science expertise. This work serves as a foundational resource for researchers aiming to harness LLMs for data-driven materials research. The insights presented here could significantly enhance how researchers across disciplines access and utilize scientific information, potentially accelerating the development of novel materials for critical societal needs.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Nextcloud/Zotero/Schilling-Wilhelmi et al. - 2024 - From Text to Insight Large Language Models for Materials Science Data Extraction.pdf;/Users/wasmer/Zotero/storage/FKVEPZBG/2407.html} +} + @article{schlederDFTMachineLearning2019, title = {From {{DFT}} to Machine Learning: Recent Approaches to Materials Science–a Review}, shorttitle = {From {{DFT}} to Machine Learning}, @@ -16416,7 +17233,7 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, urldate = {2024-08-31}, abstract = {Recent advances in experimental and computational methods are increasing the quantity and complexity of generated data. This massive amount of raw data needs to be stored and interpreted in order to advance the materials science field. Identifying correlations and patterns from large amounts of complex data is being performed by machine learning algorithms for decades. Recently, the materials science community started to invest in these methodologies to extract knowledge and insights from the accumulated data. 
This review follows a logical sequence starting from density functional theory as the representative instance of electronic structure methods, to the subsequent high-throughput approach, used to generate large amounts of data. Ultimately, data-driven strategies which include data mining, screening, and machine learning techniques, employ the data generated. We show how these approaches to modern computational materials science are being used to uncover complexities and design novel materials with enhanced properties. Finally, we point to the present research problems, challenges, and potential future perspectives of this new exciting field.}, langid = {english}, - keywords = {/unread}, + keywords = {AML,condensed matter,high-throughput,materials,ML,ML-DFT,ML-IAP,prediction of energy,prediction of magnetic properties,prediction of phase,prediction of topology,review,review-of-AML,superconductor,topological,topological insulator,topological phase transition}, file = {/Users/wasmer/Nextcloud/Zotero/Schleder et al. - 2019 - From DFT to machine learning recent approaches to materials science–a review.pdf} } @@ -16506,7 +17323,7 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics}, url = {https://www.sciencedirect.com/science/article/pii/S2542529324002360}, urldate = {2024-10-03}, abstract = {The accuracy of a machine learning model is limited by the quality and quantity of the data available for its training and validation. This problem is particularly challenging in materials science, where large, high-quality, and consistent datasets are scarce. Here we present alexandria, an open database of more than 5 million density-functional theory calculations for periodic three-, two-, and one-dimensional compounds. We use this data to train machine learning models to reproduce seven different properties using both composition-based models and crystal-graph neural networks. 
In the majority of cases, the error of the models decreases monotonically with the training data, although some graph networks seem to saturate for large training set sizes. Differences in the training can be correlated with the statistical distribution of the different properties. We also observe that graph-networks, that have access to detailed geometrical information, yield in general more accurate models than simple composition-based methods. Finally, we assess several universal machine learning interatomic potentials. Crystal geometries optimised with these force fields are very high quality, but unfortunately the accuracy of the energies is still lacking. Furthermore, we observe some instabilities for regions of chemical space that are undersampled in the training sets used for these models. This study highlights the potential of large-scale, high-quality datasets to improve machine learning models in materials science.}, - keywords = {/unread,2D material,Alexandria database,ALIGNN,AML,binary systems,CGAT,convex hull,crystal graph,dataset,large dataset,M3GNet,MACE,materials database,ML,n-ary alloys,OPTIMADE,PBE,quaternary systems,SCAN,ternary systems,universal potential,with-code,with-data}, + keywords = {2D material,Alexandria database,ALIGNN,AML,binary systems,CGAT,convex hull,crystal graph,dataset,large dataset,M3GNet,MACE,materials database,ML,n-ary alloys,OPTIMADE,PBE,quaternary systems,SCAN,ternary systems,universal potential,with-code,with-data}, file = {/Users/wasmer/Zotero/storage/A2DT6HKC/S2542529324002360.html} } @@ -16626,6 +17443,21 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/Scholz_2022_Writing and publishing a scientific paper.pdf} } +@online{schubertPredictingElectronicScreening2024, + title = {Predicting Electronic Screening for Fast {{Koopmans}} Spectral Functional Calculations}, + author = {Schubert, Yannick and Luber, Sandra and Marzari, Nicola and Linscott, 
Edward}, + date = {2024-06-21}, + eprint = {2406.15205}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.15205}, + url = {http://arxiv.org/abs/2406.15205}, + urldate = {2024-10-17}, + abstract = {Koopmans spectral functionals represent a powerful extension of Kohn-Sham density-functional theory (DFT), enabling accurate predictions of spectral properties with state-of-the-art accuracy. The success of these functionals relies on capturing the effects of electronic screening through scalar, orbital-dependent parameters. These parameters have to be computed for every calculation, making Koopmans spectral functionals more expensive than their DFT counterparts. In this work, we present a machine-learning model that -- with minimal training -- can predict these screening parameters directly from orbital densities calculated at the DFT level. We show on two prototypical use cases that using the screening parameters predicted by this model, instead of those calculated from linear response, leads to orbital energies that differ by less than 20 meV on average. Since this approach dramatically reduces run-times with minimal loss of accuracy, it will enable the application of Koopmans spectral functionals to classes of problems that previously would have been prohibitively expensive, such as the prediction of temperature-dependent spectral properties.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science,Physics - Chemical Physics,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Schubert et al. 
- 2024 - Predicting electronic screening for fast Koopmans spectral functional calculations.pdf;/Users/wasmer/Zotero/storage/WGCN5BAZ/2406.html} +} + @article{schuchComputationalComplexityInteracting2009, title = {Computational Complexity of Interacting Electrons and Fundamental Limitations of Density Functional Theory}, author = {Schuch, Norbert and Verstraete, Frank}, @@ -16910,6 +17742,24 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/Sharma and Sanvito - 2024 - Quantum-accurate machine learning potentials for metal-organic frameworks using temperature driven a.pdf} } +@article{shenoyCollinearspinMachineLearned2024, + title = {Collinear-Spin Machine Learned Interatomic Potential for {{Fe7Cr2Ni}} Alloy}, + author = {Shenoy, Lakshmi and Woodgate, Christopher D. and Staunton, Julie B. and Bartók, Albert P. and Becquart, Charlotte S. and Domain, Christophe and Kermode, James R.}, + date = {2024-03-22}, + journaltitle = {Physical Review Materials}, + shortjournal = {Phys. Rev. Mater.}, + volume = {8}, + number = {3}, + pages = {033804}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevMaterials.8.033804}, + url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.8.033804}, + urldate = {2024-11-20}, + abstract = {We have developed a machine learned interatomic potential for the prototypical austenitic steel Fe7Cr2Ni, using the Gaussian approximation potential (GAP) framework. This GAP can model the alloy's properties with close to density functional theory (DFT) accuracy, while at the same time allowing us to access larger length and time scales than expensive first-principles methods. We also extended the GAP input descriptors to approximate the effects of collinear spins (spin GAP), and demonstrate how this extended model successfully predicts structural distortions due to antiferromagnetic and paramagnetic spin states. 
We demonstrate the application of the spin GAP model for bulk properties and vacancies and validate against DFT. These results are a step towards modeling the atomistic origins of ageing in austenitic steels with higher accuracy.}, + keywords = {AFM,alloys,AML,collinear,CPA,defects,disordered,EAM,Ferromagnetism,GAP,kernel PCA,KKR,magnetic ML-IAP,magnetism,MC,MD,MLP,prediction of energy,SOAP,spin-polarized,ternary systems,vacancies,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Shenoy et al. - 2024 - Collinear-spin machine learned interatomic potential for Fe7Cr2Ni alloy.pdf;/Users/wasmer/Zotero/storage/EJ6QRAC3/PhysRevMaterials.8.html} +} + @online{shenRepresentationindependentElectronicCharge2021, title = {A Representation-Independent Electronic Charge Density Database for Crystalline Materials}, author = {Shen, Jimmy-Xuan and Munro, Jason M. and Horton, Matthew K. and Huck, Patrick and Dwaraknath, Shyam and Persson, Kristin A.}, @@ -17293,6 +18143,24 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/Soiland-Reyes et al_2022_Packaging research artefacts with RO-Crate.pdf;/Users/wasmer/Zotero/storage/X2IWHLC7/ds210053.html} } +@article{solovyevExchangeInteractionsMagnetic2021, + title = {Exchange Interactions and Magnetic Force Theorem}, + author = {Solovyev, I. V.}, + date = {2021-03-17}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {103}, + number = {10}, + pages = {104428}, + publisher = {American Physical Society}, + doi = {10.1103/PhysRevB.103.104428}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.103.104428}, + urldate = {2024-10-30}, + abstract = {We critically reexamine the problem of interatomic exchange interactions, which describe the total energy change caused by infinitesimal rotations of spins near some equilibrium state in the framework of constrained spin-density functional theory (cSDFT). 
For small variations of the spin magnetization, such interactions can always be related to the response function (or transverse spin susceptibility). However, the form of this relation can depend on additional approximations supplementing the practical calculations. Particularly, the commonly used magnetic force theorem prescribes the linear relation between the exchange interactions and the response function, while the exact theory requires this dependence to be inverse, as it can be rigorously derived from cSDFT. We explore the origin and consequences of these differences in the definition for a wide class of materials, including ferromagnetic Ni, antiferromagnetic NiO, half-metallic ferromagnetic CrO2, multiferroic HoMnO3, and layered van der Waals magnets CrCl3 and CrI3. While in most of these cases, the magnetic force theorem produces quite reasonable results and can be rigorously justified in the long wavelength and strong-coupling limits, the exact formulation appears to be more consistent, especially in dealing with two important issues, which typically arise in the theory of exchange interactions: (i) the treatment of the ligand spins and (ii) the choice of the suitable variable for the description of infinitesimal rotations in the system of spins within cSDFT. Both issues can be efficiently resolved by employing the ideas of adiabatic spin dynamics supplemented with the exact expression for the exchange interactions. Particularly, the ligand spins can produce quite sizable contributions to the total energy change. For this case, we propose a simple downfolding procedure of elimination of the ligand spins from the model by transferring their effects to the interaction parameters between the localized 3d spins. 
Furthermore, the exchange interactions appear to be sensitive to the definition of the variable, which is used to describe the rotations of spins in cSDFT: Generally, the rotations of spin moments and spin magnetization matrix lead to different results. In this respect, we argue that the rotations of spin moments are more suitable for the description of low-energy excitations, while the rotations of the whole magnetization matrix cause much stronger perturbation in the system of spins.}, + keywords = {AFM,constrained DFT,DFT,exchange interaction,FM,Heisenberg model,magnetic force theorem,magnetism,multiferroic,spin spiral,spin-dependent}, + file = {/Users/wasmer/Nextcloud/Zotero/Solovyev - 2021 - Exchange interactions and magnetic force theorem.pdf;/Users/wasmer/Zotero/storage/5K8S3MSN/PhysRevB.103.html} +} + @article{sommer3DSCDatasetSuperconductors2023, title = {{{3DSC}} - a Dataset of Superconductors Including Crystal Structures}, author = {Sommer, Timo and Willa, Roland and Schmalian, Jörg and Friederich, Pascal}, @@ -17367,6 +18235,22 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/Song et al_2022_Density-Corrected DFT Explained.pdf} } +@online{songNeuralSCFNeuralNetwork2024, + title = {{{NeuralSCF}}: {{Neural}} Network Self-Consistent Fields for Density Functional Theory}, + shorttitle = {{{NeuralSCF}}}, + author = {Song, Feitong and Feng, Ji}, + date = {2024-06-22}, + eprint = {2406.15873}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.15873}, + url = {http://arxiv.org/abs/2406.15873}, + urldate = {2024-10-17}, + abstract = {Kohn-Sham density functional theory (KS-DFT) has found widespread application in accurate electronic structure calculations. However, it can be computationally demanding especially for large-scale simulations, motivating recent efforts toward its machine-learning (ML) acceleration. 
We propose a neural network self-consistent fields (NeuralSCF) framework that establishes the Kohn-Sham density map as a deep learning objective, which encodes the mechanics of the Kohn-Sham equations. Modeling this map with an SE(3)-equivariant graph transformer, NeuralSCF emulates the Kohn-Sham self-consistent iterations to obtain electron densities, from which other properties can be derived. NeuralSCF achieves state-of-the-art accuracy in electron density prediction and derived properties, featuring exceptional zero-shot generalization to a remarkable range of out-of-distribution systems. NeuralSCF reveals that learning from KS-DFT's intrinsic mechanics significantly enhances the model's accuracy and transferability, offering a promising stepping stone for accelerating electronic structure calculations through mechanics learning.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning,Physics - Chemical Physics,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Song and Feng - 2024 - NeuralSCF Neural network self-consistent fields for density functional theory.pdf;/Users/wasmer/Zotero/storage/HPCFZD9P/2406.html} +} + @online{songOmniPredLanguageModels2024, title = {{{OmniPred}}: {{Language Models}} as {{Universal Regressors}}}, shorttitle = {{{OmniPred}}}, @@ -17727,6 +18611,41 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Sunshine et al_2023_Chemical Properties from Graph Neural Network-Predicted Electron Densities.pdf} } +@book{susskindQuantumMechanicsTheoretical2014, + title = {Quantum Mechanics: The Theoretical Minimum}, + shorttitle = {Quantum Mechanics}, + author = {Susskind, Leonard and Friedman, Art}, + date = {2014}, + series = {Theoretical Minimum}, + publisher = {Basic Books}, + location = {New York}, + url = {http://catdir.loc.gov/catdir/enhancements/fy1410/2014932200-b.html}, + 
urldate = {2025-01-06}, + abstract = {Explains the theory and associated mathematics of quantum mechanics, discussing topics ranging from uncertainty and time dependence to particle and wave states}, + isbn = {978-0-465-03667-7}, + langid = {english}, + pagetotal = {364}, + keywords = {/unread}, + annotation = {OCLC: 853310551} +} + +@book{susskindTheoreticalMinimumWhat2013, + title = {The Theoretical Minimum. {{What}} You Need to Know to Start Doing Physics}, + namea = {Susskind, Leonard and Hrabovsky, George and Friedman, Art}, + nameatype = {collaborator}, + date = {2013}, + publisher = {Basic Books}, + location = {New York, NY}, + url = {http://digitale-objekte.hbz-nrw.de/storage2/2014/04/06/file_4/5568736.pdf}, + urldate = {2025-01-06}, + abstract = {\&\#34;Beautifully clear explanations of famously\&\#39;difficult\&\#39; things\&\#34;-Wall Street Journal}, + isbn = {978-0-465-02811-5}, + langid = {english}, + pagetotal = {238}, + keywords = {/unread}, + annotation = {OCLC: 1075901036} +} + @online{suSVNetWhereEquivariance2022, title = {{{SVNet}}: {{Where SO}}(3) {{Equivariance Meets Binarization}} on {{Point Cloud Representation}}}, shorttitle = {{{SVNet}}}, @@ -17997,6 +18916,21 @@ Subject\_term\_id: databases;materials-science}, file = {/Users/wasmer/Nextcloud/Zotero/Tang et al_2023_Efficient hybrid density functional calculation by deep learning.pdf;/Users/wasmer/Zotero/storage/E995LV4K/2302.html} } +@online{tangImprovingDensityMatrix2024, + title = {Improving Density Matrix Electronic Structure Method by Deep Learning}, + author = {Tang, Zechen and Zou, Nianlong and Li, He and Wang, Yuxiang and Yuan, Zilong and Tao, Honggeng and Li, Yang and Chen, Zezhou and Zhao, Boheng and Sun, Minghui and Jiang, Hong and Duan, Wenhui and Xu, Yong}, + date = {2024-06-25}, + eprint = {2406.17561}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.17561}, + url = {http://arxiv.org/abs/2406.17561}, + urldate = {2024-10-17}, + abstract = {The combination of deep learning 
and ab initio materials calculations is emerging as a trending frontier of materials science research, with deep-learning density functional theory (DFT) electronic structure being particularly promising. In this work, we introduce a neural-network method for modeling the DFT density matrix, a fundamental yet previously unexplored quantity in deep-learning electronic structure. Utilizing an advanced neural network framework that leverages the nearsightedness and equivariance properties of the density matrix, the method demonstrates high accuracy and excellent generalizability in multiple example studies, as well as capability to precisely predict charge density and reproduce other electronic structure properties. Given the pivotal role of the density matrix in DFT as well as other computational methods, the current research introduces a novel approach to the deep-learning study of electronic structure properties, opening up new opportunities for deep-learning enhanced computational materials study.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Tang et al. - 2024 - Improving density matrix electronic structure method by deep learning.pdf;/Users/wasmer/Zotero/storage/F8YSU4Y7/2406.html} +} + @unpublished{tangiralaNeuralNetworkPredictiveModeling2022, title = {Neural-{{Network Predictive Modeling}} of {{Physical Properties}} in {{Binary Magnetic}} and {{Non-Magnetic Alloys}}}, author = {Tangirala, Sairam}, @@ -18023,6 +18957,38 @@ Ying-Wai Li\\ file = {/Users/wasmer/Zotero/storage/SGNGMK25/T32.html} } +@online{tangMultitaskLearningMolecular2024, + title = {Multi-Task Learning for Molecular Electronic Structure Approaching Coupled-Cluster Accuracy}, + author = {Tang, Hao and Xiao, Brian and He, Wenhao and Subasic, Pero and Harutyunyan, Avetik R. 
and Wang, Yao and Liu, Fang and Xu, Haowei and Li, Ju}, + date = {2024-06-24}, + eprint = {2405.12229}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2405.12229}, + url = {http://arxiv.org/abs/2405.12229}, + urldate = {2024-10-17}, + abstract = {Machine learning (ML) plays an important role in quantum chemistry, providing fast-to-evaluate predictive models for various properties of molecules. However, most existing ML models for molecular electronic properties use density functional theory (DFT) databases as ground truth in training, and their prediction accuracy cannot surpass that of DFT. In this work, we developed a unified ML method for electronic structures of organic molecules using the gold-standard CCSD(T) calculations as training data. Tested on hydrocarbon molecules, our model outperforms DFT with the widely-used hybrid and double hybrid functionals in computational costs and prediction accuracy of various quantum chemical properties. As case studies, we apply the model to aromatic compounds and semiconducting polymers on both ground state and excited state properties, demonstrating its accuracy and generalization capability to complex systems that are hard to calculate using CCSD(T)-level methods.}, + pubstate = {prepublished}, + keywords = {{Computer Science - Computational Engineering, Finance, and Science},/unread,Computer Science - Artificial Intelligence,Condensed Matter - Materials Science,Physics - Chemical Physics,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Tang et al. 
- 2024 - Multi-task learning for molecular electronic structure approaching coupled-cluster accuracy.pdf;/Users/wasmer/Zotero/storage/VP7WW2XM/2405.html} +} + +@online{tavazzaApproachesUncertaintyQuantification2023, + title = {Approaches for {{Uncertainty Quantification}} of {{AI-predicted Material Properties}}: {{A Comparison}}}, + shorttitle = {Approaches for {{Uncertainty Quantification}} of {{AI-predicted Material Properties}}}, + author = {Tavazza, Francesca and Choudhary, Kamal and DeCost, Brian}, + date = {2023-10-19}, + eprint = {2310.13136}, + eprinttype = {arXiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2310.13136}, + url = {http://arxiv.org/abs/2310.13136}, + urldate = {2025-01-08}, + abstract = {The development of large databases of material properties, together with the availability of powerful computers, has allowed machine learning (ML) modeling to become a widely used tool for predicting material performances. While confidence intervals are commonly reported for such ML models, prediction intervals, i.e., the uncertainty on each prediction, are not as frequently available. Here, we investigate three easy-to-implement approaches to determine such individual uncertainty, comparing them across ten ML quantities spanning energetics, mechanical, electronic, optical, and spectral properties. Specifically, we focused on the Quantile approach, the direct machine learning of the prediction intervals and Ensemble methods.}, + pubstate = {prepublished}, + keywords = {AML,CFID,descriptors,ensemble learning,gradient boosting,JARVIS,JARVIS-DFT,LightGBM,ML,uncertainty quantification,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Tavazza et al. 
- 2023 - Approaches for Uncertainty Quantification of AI-predicted Material Properties A Comparison.pdf;/Users/wasmer/Zotero/storage/AVVBFEPB/2310.html} +} + @article{tealeDFTExchangeSharing2022, title = {{{DFT Exchange}}: {{Sharing Perspectives}} on the {{Workhorse}} of {{Quantum Chemistry}} and {{Materials Science}}}, shorttitle = {{{DFT Exchange}}}, @@ -18472,6 +19438,21 @@ Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroe file = {/Users/wasmer/Nextcloud/Zotero/Ueno et al. - 2024 - SpinMultiNet Neural Network Potential Incorporating Spin Degrees of Freedom with Multi-Task Learnin.pdf;/Users/wasmer/Zotero/storage/2T3H8XM6/2409.html} } +@online{uhrinMachineLearningHubbard2024, + title = {Machine Learning {{Hubbard}} Parameters with Equivariant Neural Networks}, + author = {Uhrin, Martin and Zadoks, Austin and Binci, Luca and Marzari, Nicola and Timrov, Iurii}, + date = {2024-06-04}, + eprint = {2406.02457}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.02457}, + url = {http://arxiv.org/abs/2406.02457}, + urldate = {2024-10-17}, + abstract = {Density-functional theory with extended Hubbard functionals (DFT+\$U\$+\$V\$) provides a robust framework to accurately describe complex materials containing transition-metal or rare-earth elements. It does so by mitigating self-interaction errors inherent to semi-local functionals which are particularly pronounced in systems with partially-filled \$d\$ and \$f\$ electronic states. However, achieving accuracy in this approach hinges upon the accurate determination of the on-site \$U\$ and inter-site \$V\$ Hubbard parameters. In practice, these are obtained either by semi-empirical tuning, requiring prior knowledge, or, more correctly, by using predictive but expensive first-principles calculations. 
Here, we present a machine learning model based on equivariant neural networks which uses atomic occupation matrices as descriptors, directly capturing the electronic structure, local chemical environment, and oxidation states of the system at hand. We target here the prediction of Hubbard parameters computed self-consistently with iterative linear-response calculations, as implemented in density-functional perturbation theory (DFPT), and structural relaxations. Remarkably, when trained on data from 11 materials spanning various crystal structures and compositions, our model achieves mean absolute relative errors of 3\% and 5\% for Hubbard \$U\$ and \$V\$ parameters, respectively. By circumventing computationally expensive DFT or DFPT self-consistent protocols, our model significantly expedites the prediction of Hubbard parameters with negligible computational overhead, while approaching the accuracy of DFPT. Moreover, owing to its robust transferability, the model facilitates accelerated materials discovery and design via high-throughput calculations, with relevance for various technological applications.}, + pubstate = {prepublished}, + keywords = {/unread,Computer Science - Machine Learning,Condensed Matter - Materials Science,Physics - Chemical Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Uhrin et al. 
- 2024 - Machine learning Hubbard parameters with equivariant neural networks.pdf;/Users/wasmer/Zotero/storage/L7KIG7MJ/2406.html} +} + @article{uhrinWorkflowsAiiDAEngineering2021, title = {Workflows in {{AiiDA}}: {{Engineering}} a High-Throughput, Event-Based Engine for Robust and Modular Computational Workflows}, shorttitle = {Workflows in {{AiiDA}}}, @@ -18971,6 +19952,21 @@ Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroe file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2024_DeepH-2.pdf;/Users/wasmer/Zotero/storage/WVSPJ8YJ/2401.html} } +@online{wangEfficientPredictionPotential2024, + title = {Efficient Prediction of Potential Energy Surface and Physical Properties with {{Kolmogorov-Arnold Networks}}}, + author = {Wang, Rui and Yu, Hongyu and Zhong, Yang and Xiang, Hongjun}, + date = {2024-09-05}, + eprint = {2409.03430}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2409.03430}, + url = {http://arxiv.org/abs/2409.03430}, + urldate = {2024-10-17}, + abstract = {The application of machine learning methodologies for predicting properties within materials science has garnered significant attention. Among recent advancements, Kolmogorov-Arnold Networks (KANs) have emerged as a promising alternative to traditional Multi-Layer Perceptrons (MLPs). This study evaluates the impact of substituting MLPs with KANs within three established machine learning frameworks: Allegro, Neural Equivariant Interatomic Potentials (NequIP), and the Edge-Based Tensor Prediction Graph Neural Network (ETGNN). Our results demonstrate that the integration of KANs generally yields enhanced prediction accuracies. Specifically, replacing MLPs with KANs in the output blocks leads to notable improvements in accuracy and, in certain scenarios, also results in reduced training times. Furthermore, employing KANs exclusively in the output block facilitates faster inference and improved computational efficiency relative to utilizing KANs throughout the entire model. 
The selection of an optimal basis function for KANs is found to be contingent upon the particular problem at hand. Our results demonstrate the strong potential of KANs in enhancing machine learning potentials and material property predictions.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al. - 2024 - Efficient prediction of potential energy surface and physical properties with Kolmogorov-Arnold Netw.pdf;/Users/wasmer/Zotero/storage/9W2ARXEN/2409.html} +} + @online{wangGeneratingMolecularConformer2023, title = {Generating {{Molecular Conformer Fields}}}, author = {Wang, Yuyang and Elhag, Ahmed A. and Jaitly, Navdeep and Susskind, Joshua M. and Bautista, Miguel Angel}, @@ -19255,6 +20251,21 @@ Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconduct file = {/Users/wasmer/Nextcloud/Zotero/Wang_Zhang_2017_Topological states of condensed matter.pdf} } +@online{wangUniversalMaterialsModel2024, + title = {Universal Materials Model of Deep-Learning Density Functional Theory {{Hamiltonian}}}, + author = {Wang, Yuxiang and Li, Yang and Tang, Zechen and Li, He and Yuan, Zilong and Tao, Honggeng and Zou, Nianlong and Bao, Ting and Liang, Xinghao and Chen, Zezhou and Xu, Shanghua and Bian, Ce and Xu, Zhiming and Wang, Chong and Si, Chen and Duan, Wenhui and Xu, Yong}, + date = {2024-06-15}, + eprint = {2406.10536}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2406.10536}, + url = {http://arxiv.org/abs/2406.10536}, + urldate = {2024-10-17}, + abstract = {Realizing large materials models has emerged as a critical endeavor for materials research in the new era of artificial intelligence, but how to achieve this fantastic and challenging objective remains elusive. 
Here, we propose a feasible pathway to address this paramount pursuit by developing universal materials models of deep-learning density functional theory Hamiltonian (DeepH), enabling computational modeling of the complicated structure-property relationship of materials in general. By constructing a large materials database and substantially improving the DeepH method, we obtain a universal materials model of DeepH capable of handling diverse elemental compositions and material structures, achieving remarkable accuracy in predicting material properties. We further showcase a promising application of fine-tuning universal materials models for enhancing specific materials models. This work not only demonstrates the concept of DeepH's universal materials model but also lays the groundwork for developing large materials models, opening up significant opportunities for advancing artificial intelligence-driven materials discovery.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al. 
- 2024 - Universal materials model of deep-learning density functional theory Hamiltonian.pdf;/Users/wasmer/Zotero/storage/B5MPYXLW/2406.html} +} + @article{wangWillAnyCrap2020, title = {Will {{Any Crap We Put}} into {{Graphene Increase Its Electrocatalytic Effect}}?}, author = {Wang, Lu and Sofer, Zdenek and Pumera, Martin}, @@ -19413,6 +20424,40 @@ Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconduct file = {/Users/wasmer/Zotero/storage/NTW3FHQ6/1020053.html} } +@unpublished{wasmerPhysicsPhysicsAIHybrids2024, + type = {Conference talk}, + title = {From Physics to Physics-{{AI}} Hybrids in Quantum Materials Simulation}, + author = {Wasmer, Johannes}, + namea = {Wasmer, Johannes and Rüssmann, Philipp and Assent, Ira and Blügel, Stefan}, + nameatype = {collaborator}, + date = {2024}, + publisher = {Forschungzentrum Jülich}, + url = {https://juser.fz-juelich.de/record/1031808}, + urldate = {2024-10-18}, + abstract = {No abstract.}, + eventtitle = {{{HDS-LEE Retreat}} 2024}, + langid = {english}, + venue = {Overbach}, + keywords = {/unread,all-electron,AML,DFT,energy challenge,for introductions,FZJ,ML,ML-DFT,PGI,PGI-1/IAS-1} +} + +@unpublished{wasmerPredictionMagneticExchange2024, + type = {Conference talk}, + title = {Prediction of the Magnetic Exchange Interaction in Doped Topological Insulators}, + author = {Wasmer, Johannes and Mozumder, Rubel}, + namea = {Wasmer, Johannes and Antognini Silva, David and Rüssmann, Philipp and Blügel, Stefan}, + nameatype = {collaborator}, + date = {2024}, + publisher = {Forschungzentrum Jülich}, + url = {https://juser.fz-juelich.de/record/1031807}, + urldate = {2024-10-18}, + abstract = {We present a benchmark study of surrogate models for impurities embedded into crystalline solids. Using the Korringa-Kohn-Rostoker Green Function method and the AiiDA workflow engine [1], we have built a database of magnetic transition metal impurity dimers embedded in the topological insulator Bi2Te3. 
We predict isotropic exchange interaction of the impurity dimer in the classical Heisenberg model with machine learning and then use these surrogates as input for spin dynamics calculations to find the magnetic ground state of the material [2]. The study compares various recent E(3)-equivariant models such as ACE and MACE [3] in terms of performance and reproducible end-to-end workflows.References.[1] P. Rüßmann, F. Bertoldo, S. Blügel, npj. Comput. Mater., 7, 13 (2021)[2] P. Rüßmann, J. Ribas Sobreviela, M. Sallermann, M. Hoffmann, F. Rhiem, S. Blügel, Front. Mater., 9, (2022)[3] Batatia, I., Kovács, D. P., Simm, G. N. C., Ortner, C. \& Csányi, G. MACE: Higher Order Equivariant Message Passing Neural Networks for Fast and Accurate Force Fields. Preprint (2022).}, + eventtitle = {{{CECAM Workshop Machine Learning}} of {{First Principles Observables}}}, + langid = {english}, + venue = {Berlin}, + keywords = {/unread,conference contribution,FZJ,Jij,ML,PGI,PGI-1/IAS-1} +} + @online{weiGraphLearningIts2023, title = {Graph {{Learning}} and {{Its Applications}}: {{A Holistic Survey}}}, shorttitle = {Graph {{Learning}} and {{Its Applications}}}, @@ -19459,6 +20504,22 @@ Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconduct file = {/Users/wasmer/Nextcloud/Zotero/Weiler_Cesa_2021_General $E(2)$-Equivariant Steerable CNNs.pdf;/Users/wasmer/Zotero/storage/49VD5PUN/1911.html} } +@book{weinbergLecturesQuantumMechanics2015, + title = {Lectures on {{Quantum Mechanics}}}, + author = {Weinberg, Steven}, + date = {2015}, + edition = {2}, + publisher = {Cambridge University Press}, + location = {Cambridge}, + doi = {10.1017/CBO9781316276105}, + url = {https://www.cambridge.org/core/books/lectures-on-quantum-mechanics/F739B9577D2473995024FA5E9ABA9B6C}, + urldate = {2025-01-05}, + abstract = {Nobel Laureate Steven Weinberg combines exceptional physical insight with his gift for clear exposition, to provide a concise introduction to modern quantum mechanics, 
in this fully updated second edition of his successful textbook. Now including six brand new sections covering key topics such as the rigid rotator and quantum key distribution, as well as major additions to existing topics throughout, this revised edition is ideally suited to a one-year graduate course or as a reference for researchers. Beginning with a review of the history of quantum mechanics and an account of classic solutions of the Schrödinger equation, before quantum mechanics is developed in a modern Hilbert space approach, Weinberg uses his remarkable expertise to elucidate topics such as Bloch waves and band structure, the Wigner–Eckart theorem, magic numbers, isospin symmetry, and general scattering theory. Problems are included at the ends of chapters, with solutions available for instructors at www.cambridge.org/9781107111660.}, + isbn = {978-1-107-11166-0}, + keywords = {/unread,learning material,Quantum Physics,textbook}, + file = {/Users/wasmer/Zotero/storage/3P7DPKPT/F739B9577D2473995024FA5E9ABA9B6C.html} +} + @article{weinertSolutionPoissonEquation1981, title = {Solution of {{Poisson}}’s Equation: {{Beyond Ewald}}â€type Methods}, shorttitle = {Solution of {{Poisson}}’s Equation}, @@ -19752,6 +20813,23 @@ Subject\_term\_id: publication-characteristics;research-data}, file = {/Users/wasmer/Nextcloud/Zotero/Willatt et al_2018_Feature optimization for atomistic machine learning yields a data-driven.pdf;/Users/wasmer/Zotero/storage/ZY2VC9JE/C8CP05921G.html} } +@online{winesCHIPSFFEvaluatingUniversal2024, + title = {{{CHIPS-FF}}: {{Evaluating Universal Machine Learning Force Fields}} for {{Material Properties}}}, + shorttitle = {{{CHIPS-FF}}}, + author = {Wines, Daniel and Choudhary, Kamal}, + date = {2024-12-21}, + eprint = {2412.10516}, + eprinttype = {arXiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2412.10516}, + url = {http://arxiv.org/abs/2412.10516}, + urldate = {2025-01-08}, + abstract = {In this work, we introduce CHIPS-FF 
(Computational High-Performance Infrastructure for Predictive Simulation-based Force Fields), a universal, open-source benchmarking platform for machine learning force fields (MLFFs). This platform provides robust evaluation beyond conventional metrics such as energy, focusing on complex properties including elastic constants, phonon spectra, defect formation energies, surface energies, and interfacial and amorphous phase properties. Utilizing 13 graph-based MLFF models including ALIGNN-FF, CHGNet, MatGL, MACE, SevenNet, ORB and OMat24, the CHIPS-FF workflow integrates the Atomic Simulation Environment (ASE) with JARVIS-Tools to facilitate automated high-throughput simulations. Our framework is tested on a set of 104 materials, including metals, semiconductors and insulators representative of those used in semiconductor components, with each MLFF evaluated for convergence, accuracy, and computational cost. Additionally, we evaluate the force-prediction accuracy of these models for close to 2 million atomic structures. 
By offering a streamlined, flexible benchmarking infrastructure, CHIPS-FF aims to guide the development and deployment of MLFFs for real-world semiconductor applications, bridging the gap between quantum mechanical simulations and large-scale device modeling.}, + pubstate = {prepublished}, + keywords = {Alexandria database,ALIGNN,AML,benchmarking,CHGNet,defects,Equiformer,foundation models,JARVIS,leaderboard,MACE,materials,materials project,MatGL,MD,ML,model comparison,MOSFET,OMat24,Orbital Materials,prediction of energy,prediction of mechanical properties,prediction of phonon dispersion,prediction of structure,prediction of thermal properties,Semiconductors,SevenNet,universal potential,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Wines and Choudhary - 2024 - CHIPS-FF Evaluating Universal Machine Learning Force Fields for Material Properties.pdf;/Users/wasmer/Zotero/storage/L2PXJQH5/2412.html} +} + @unpublished{winterUnsupervisedLearningGroup2022, title = {Unsupervised {{Learning}} of {{Group Invariant}} and {{Equivariant Representations}}}, author = {Winter, Robin and Bertolini, Marco and Le, Tuan and Noé, Frank and Clevert, Djork-Arné}, @@ -20312,6 +21390,21 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the file = {/Users/wasmer/Nextcloud/Zotero/Yao et al_2018_The TensorMol-0.pdf} } +@online{yuanDeepLearningDensity2024, + title = {Deep Learning Density Functional Theory {{Hamiltonian}} in Real Space}, + author = {Yuan, Zilong and Tang, Zechen and Tao, Honggeng and Gong, Xiaoxun and Chen, Zezhou and Wang, Yuxiang and Li, He and Li, Yang and Xu, Zhiming and Sun, Minghui and Zhao, Boheng and Wang, Chong and Duan, Wenhui and Xu, Yong}, + date = {2024-07-19}, + eprint = {2407.14379}, + eprinttype = {arXiv}, + doi = {10.48550/arXiv.2407.14379}, + url = {http://arxiv.org/abs/2407.14379}, + urldate = {2024-10-17}, + abstract = {Deep learning electronic structures from ab initio calculations holds great potential to revolutionize 
computational materials studies. While existing methods proved success in deep-learning density functional theory (DFT) Hamiltonian matrices, they are limited to DFT programs using localized atomic-like bases and heavily depend on the form of the bases. Here, we propose the DeepH-r method for deep-learning DFT Hamiltonians in real space, facilitating the prediction of DFT Hamiltonian in a basis-independent manner. An equivariant neural network architecture for modeling the real-space DFT potential is developed, targeting a more fundamental quantity in DFT. The real-space potential exhibits simplified principles of equivariance and enhanced nearsightedness, further boosting the performance of deep learning. When applied to evaluate the Hamiltonian matrix, this method significantly improved in accuracy, as exemplified in multiple case studies. Given the abundance of data in the real-space potential, this work may pave a novel pathway for establishing a ``large materials model" with increased accuracy.}, + pubstate = {prepublished}, + keywords = {/unread,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Yuan et al. 
- 2024 - Deep learning density functional theory Hamiltonian in real space.pdf;/Users/wasmer/Zotero/storage/RFCYBR4Z/2407.html} +} + @online{yuanEquivariantNeuralNetwork2024, title = {Equivariant {{Neural Network Force Fields}} for {{Magnetic Materials}}}, author = {Yuan, Zilong and Xu, Zhiming and Li, He and Cheng, Xinle and Tao, Honggeng and Tang, Zechen and Zhou, Zhiyuan and Duan, Wenhui and Xu, Yong}, @@ -20654,7 +21747,7 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the date = {2024}, location = {Forschungszentrum Jülich}, howpublished = {Unpublished notes}, - keywords = {/unread} + file = {/Users/wasmer/Nextcloud/Zotero/Zeller - 2024 - Yukawa potential in KKRnano.pdf} } @book{zengQuantumInformationMeets2019, @@ -21012,13 +22105,30 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the file = {/Users/wasmer/Nextcloud/Zotero/Zhang_Ling_2018_A strategy to apply machine learning to small datasets in materials science.pdf;/Users/wasmer/Zotero/storage/PEGZREYC/s41524-018-0081-z.html} } +@online{zhangTheoryUnderstandingArtificial2024, + title = {A Theory of Understanding for Artificial Intelligence: Composability, Catalysts, and Learning}, + shorttitle = {A Theory of Understanding for Artificial Intelligence}, + author = {Zhang, Zijian and Aronowitz, Sara and Aspuru-Guzik, Alán}, + date = {2024-08-16}, + eprint = {2408.08463}, + eprinttype = {arXiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2408.08463}, + url = {http://arxiv.org/abs/2408.08463}, + urldate = {2025-01-08}, + abstract = {Understanding is a crucial yet elusive concept in artificial intelligence (AI). This work proposes a framework for analyzing understanding based on the notion of composability. Given any subject (e.g., a person or an AI), we suggest characterizing its understanding of an object in terms of its ability to process (compose) relevant inputs into satisfactory outputs from the perspective of a verifier. 
This highly universal framework can readily apply to non-human subjects, such as AIs, non-human animals, and institutions. Further, we propose methods for analyzing the inputs that enhance output quality in compositions, which we call catalysts. We show how the structure of a subject can be revealed by analyzing its components that act as catalysts and argue that a subject's learning ability can be regarded as its ability to compose inputs into its inner catalysts. Finally we examine the importance of learning ability for AIs to attain general intelligence. Our analysis indicates that models capable of generating outputs that can function as their own catalysts, such as language models, establish a foundation for potentially overcoming existing limitations in AI understanding.}, + pubstate = {prepublished}, + keywords = {/unread,agent,AI,AI scientist,AI4Science,LLM,philosophy of science,scientific understanding}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al. - 2024 - A theory of understanding for artificial intelligence composability, catalysts, and learning.pdf;/Users/wasmer/Zotero/storage/V4SWA2WG/2408.html} +} + @article{zhangTopologicalInsulatorsPerspective2013, title = {Topological Insulators from the Perspective of First-Principles Calculations}, author = {Zhang, Haijun and Zhang, Shou-Cheng}, date = {2013}, journaltitle = {physica status solidi (RRL) – Rapid Research Letters}, volume = {7}, - number = {1-2}, + number = {1--2}, pages = {72--81}, issn = {1862-6270}, doi = {10.1002/pssr.201206414}, @@ -21042,6 +22152,23 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the file = {/Users/wasmer/Zotero/storage/AQGN5964/J00.html} } +@article{zhaoDeepMind212024, + title = {Deep {{Mind}} 21 Functional Does Not Extrapolate to Transition Metal Chemistry}, + author = {Zhao, Heng and Gould, Tim and Vuckovic, Stefan}, + date = {2024}, + journaltitle = {Physical Chemistry Chemical Physics}, + volume = {26}, + number = {16}, + pages = 
{12289--12298}, + publisher = {Royal Society of Chemistry}, + doi = {10.1039/D4CP00878B}, + url = {https://pubs.rsc.org/en/content/articlelanding/2024/cp/d4cp00878b}, + urldate = {2024-10-22}, + langid = {english}, + keywords = {/unread,AML,B3LYP,criticism,DFA,DFT,DM21,generalization,ML,ML-DFA,ML-DFT,transition metal complex,transition metals}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhao et al. - 2024 - Deep Mind 21 functional does not extrapolate to transition metal chemistry.pdf} +} + @article{zhaoQuantumOscillationsIrondoped2019, title = {Quantum Oscillations in Iron-Doped Single Crystals of the Topological Insulator \$\textbackslash mathrm\{\vphantom\}{{S}}\vphantom\{\}\{\textbackslash mathrm\{b\}\}\_\{2\}\textbackslash mathrm\{\vphantom\}{{T}}\vphantom\{\}\{\textbackslash mathrm\{e\}\}\_\{3\}\$}, author = {Zhao, Weiyao and Cortie, David and Chen, Lei and Li, Zhi and Yue, Zengji and Wang, Xiaolin}, @@ -21227,7 +22354,7 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the url = {https://www.sciencedirect.com/science/article/pii/S2666651021000012}, urldate = {2023-11-14}, abstract = {Lots of learning tasks require dealing with graph data which contains rich relation information among elements. Modeling physics systems, learning molecular fingerprints, predicting protein interface, and classifying diseases demand a model to learn from graph inputs. In other domains such as learning from non-structural data like texts and images, reasoning on extracted structures (like the dependency trees of sentences and the scene graphs of images) is an important research topic which also needs graph reasoning models. Graph neural networks (GNNs) are neural models that capture the dependence of graphs via message passing between the nodes of graphs. 
In recent years, variants of GNNs such as graph convolutional network (GCN), graph attention network (GAT), graph recurrent network (GRN) have demonstrated ground-breaking performances on many deep learning tasks. In this survey, we propose a general design pipeline for GNN models and discuss the variants of each component, systematically categorize the applications, and propose four open problems for future research.}, - keywords = {General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, + keywords = {General ML,GNN,graph,graph ML,ML,review,review-of-GNN,review-of-graph-ML}, file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2020_Graph neural networks.pdf;/Users/wasmer/Zotero/storage/YML8J4GK/S2666651021000012.html} }