Skip to content
Snippets Groups Projects
Commit 1556434c authored by Johannes Wasmer's avatar Johannes Wasmer
Browse files

update bibliography

parent 335285a3
No related branches found
No related tags found
No related merge requests found
......@@ -300,6 +300,23 @@
file = {/Users/wasmer/Zotero/storage/KA8R9BT2/Aldarawsheh et al. - 2024 - Current-driven dynamics of antiferromagnetic skyrmions from skyrmion Hall effects to hybrid inter-s.pdf}
}
 
@article{alghadeerHighlyAccurateMachine2021,
title = {Highly Accurate Machine Learning Model for Kinetic Energy Density Functional},
author = {Alghadeer, Mohammed and Al-Aswad, Abdulaziz and Alharbi, Fahhad H.},
date = {2021-10-29},
journaltitle = {Physics Letters A},
shortjournal = {Phys. Lett. A},
volume = {414},
pages = {127621},
issn = {0375-9601},
doi = {10.1016/j.physleta.2021.127621},
url = {https://www.sciencedirect.com/science/article/pii/S0375960121004850},
urldate = {2024-10-03},
abstract = {Inspired by the remarkable ongoing progress of data-driven approaches, a very accurate predictive model is developed to estimate one-dimensional kinetic energy density functionals (KEDF) using Machine Learning (ML). Starting from possible analytical forms of kinetic energy density and by utilizing a variety of solvable models, a simple – yet highly – accurate linear regression model is statistically trained to estimate the kinetic energy as functionals of the density. The mean relative accuracy for even a small number of randomly generated potentials is found to be better than the standard KEDF (Thomas-Fermi (TF) and von Weizsäcker (vW)) by several orders of magnitudes. As more different potentials of model problems are mixed, the coefficients of the linear model significantly approach the known values of Thomas-Fermi and von Weizsäcker, suggesting the reliability of the statistical training approach. This work can provide an important step toward more accurate large-scale orbital free density functional theory (OF-DFT) calculations.},
keywords = {AML,Kinetic energy density functionals,ML,ML-DFA,ML-DFT,OF-DFT},
file = {/Users/wasmer/Nextcloud/Zotero/Alghadeer et al. - 2021 - Highly accurate machine learning model for kinetic energy density functional.pdf;/Users/wasmer/Zotero/storage/X6Y8KKF9/S0375960121004850.html}
}
@article{allenMachineLearningMaterial2022,
title = {Machine Learning of Material Properties: {{Predictive}} and Interpretable Multilinear Models},
shorttitle = {Machine Learning of Material Properties},
......@@ -462,7 +479,7 @@
abstract = {There is no doubt that the economic and computing activity related to the digital sector will ramp up faster in the present decade than in the last. Moreover, computing infrastructure is one of three major drivers of new electricity use alongsidefuture and current hydrogen production and battery electric vehicles charging. Here is proposed a trajectory in this decade for CO2 emissions associated with this digitalization and its share of electricity and energy generation as a whole. The roadmap for major sources of primary energy and electricity and associated CO2 emissions areprojected and connected to the probable power use of the digital industry. The truncation error for manufacturing related CO2 emissions may be 0.8 Gt or more indicating a larger share of manufacturing and absolute digital CO2 emissions.While remaining at a moderate share of global CO2 emissions (4-5\%), the resulting digital CO2 emissions will likely rise from 2020 to 2030. The opposite may only happen if the electricity used to run especially data centers and production plants is produced locally (next to the data centers and plants) from renewable sources and data intensity metrics grow slower than expected.},
langid = {english},
keywords = {ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,ICT sector,low-power electronics,world energy consumption},
file = {/Users/wasmer/Nextcloud/Zotero/Andrae_2020_Hypotheses for Primary Energy Use, Electricity Use and CO2 Emissions of Global.pdf}
}
 
@article{andrejevicMachineLearningSpectralIndicators2022,
......@@ -766,7 +783,6 @@ Subject\_term\_id: cheminformatics;computational-models;computational-science},
url = {https://pubs.rsc.org/en/content/articlelanding/2024/dd/d3dd00213f},
urldate = {2024-09-03},
langid = {english},
keywords = {/unread},
file = {/Users/wasmer/Nextcloud/Zotero/Back et al. - 2024 - Accelerated chemical science with AI.pdf}
}
 
......@@ -3787,6 +3803,21 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa
file = {/Users/wasmer/Nextcloud/Zotero/Crisostomo et al_2023_Seven Useful Questions in Density Functional Theory.pdf;/Users/wasmer/Zotero/storage/45PQRMPB/2207.html}
}
 
@article{csanyiSoftwareAtomisticMachine2024,
title = {Software for {{Atomistic Machine Learning}}},
editor = {Csányi, Gábor and Rupp, Matthias and Küçükbenli, Emine and Manolopoulos, David E. and Ceriotti, Michele and Michaelides, Angelos and Sherrill, C. David},
date = {2024-08-09},
journaltitle = {The Journal of Chemical Physics},
publisher = {AIP Publishing},
volume = {161},
number = {060401},
url = {https://pubs.aip.org/collection/1349/Software-for-Atomistic-Machine-Learning},
urldate = {2024-09-30},
abstract = {The application of machine-learning techniques to atomistic modeling of physics, chemistry and materials science is blooming, and machine learning is becoming a},
langid = {english},
keywords = {/unread,AML,collection,ML,review-of-AML,software,special issue},
file = {/Users/wasmer/Zotero/storage/V88JWUZY/Software-for-Atomistic-Machine-Learning.html}
}
@article{cuevas-zuviriaAnalyticalModelElectron2020,
title = {Analytical {{Model}} of {{Electron Density}} and {{Its Machine Learning Inference}}},
author = {Cuevas-Zuviría, Bruno and Pacios, Luis F.},
......@@ -5418,7 +5449,7 @@ Junqi Yin\\
url = {https://pubs.rsc.org/en/content/articlelanding/2019/sc/c9sc02696g},
urldate = {2021-10-16},
langid = {english},
keywords = {library,ML,ML-DFT,ML-ESM,prediction of electron density,Resolution of the identity,with-code},
file = {/Users/wasmer/Nextcloud/Zotero/Fabrizio et al_2019_Electron density learning of non-covalent systems.pdf}
}
 
......@@ -6244,6 +6275,22 @@ Subject\_term: Quantum physics, Publishing, Peer review},
file = {/Users/wasmer/Zotero/storage/D55696WS/acs.chemmater.html}
}
 
@online{fuRecipeChargeDensity2024,
title = {A {{Recipe}} for {{Charge Density Prediction}}},
author = {Fu, Xiang and Rosen, Andrew and Bystrom, Kyle and Wang, Rui and Musaelian, Albert and Kozinsky, Boris and Smidt, Tess and Jaakkola, Tommi},
date = {2024-05-29},
eprint = {2405.19276},
eprinttype = {arXiv},
eprintclass = {physics},
doi = {10.48550/arXiv.2405.19276},
url = {https://arxiv.org/abs/2405.19276},
urldate = {2024-09-24},
abstract = {In density functional theory, charge density is the core attribute of atomic systems from which all chemical properties can be derived. Machine learning methods are promising in significantly accelerating charge density prediction, yet existing approaches either lack accuracy or scalability. We propose a recipe that can achieve both. In particular, we identify three key ingredients: (1) representing the charge density with atomic and virtual orbitals (spherical fields centered at atom/virtual coordinates); (2) using expressive and learnable orbital basis sets (basis function for the spherical fields); and (3) using high-capacity equivariant neural network architecture. Our method achieves state-of-the-art accuracy while being more than an order of magnitude faster than existing methods. Furthermore, our method enables flexible efficiency-accuracy trade-offs by adjusting the model/basis sizes.},
pubstate = {prepublished},
keywords = {AML,basis set,equivariant,eSCN,ML,ML-Density,ML-DFT,ML-ESM,molecules,prediction of electron density,QM9},
file = {/Users/wasmer/Nextcloud/Zotero/Fu et al. - 2024 - A Recipe for Charge Density Prediction.pdf;/Users/wasmer/Zotero/storage/YSSBDXWN/2405.html}
}
@article{furnessAccurateNumericallyEfficient2020,
title = {Accurate and {{Numerically Efficient r2SCAN Meta-Generalized Gradient Approximation}}},
author = {Furness, James W. and Kaplan, Aaron D. and Ning, Jinliang and Perdew, John P. and Sun, Jianwei},
......@@ -7701,6 +7748,24 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation},
file = {/Users/wasmer/Nextcloud/Zotero/Hazra et al_2024_Predicting the One-Particle Density Matrix with Machine Learning2.pdf}
}
 
@incollection{head-gordonTensorsElectronicStructure2000,
title = {Tensors in {{Electronic Structure Theory}}: {{Basic Concepts}} and {{Applications}} to {{Electron Correlation Models}}},
booktitle = {Modern {{Methods}} and {{Algorithms}} of {{Quantum Chemistry}}},
author = {Head-Gordon, Martin and Lee, Michael and family=Voorhis, given=Troy, prefix=van, useprefix=false and Maslen, Paul},
date = {2000},
series = {{{NIC}} Series},
volume = {3},
number = {FZJ-2014-02065},
publisher = {John von Neumann Institute for Computing},
location = {Jülich},
url = {http://hdl.handle.net/2128/6058},
urldate = {2024-10-03},
isbn = {978-3-00-005834-9},
langid = {english},
file = {/Users/wasmer/Nextcloud/Zotero/Head-Gordon et al. - 2000 - Tensors in Electronic Structure TheoryBasic Concepts and Applications to Electron Correlation Model.pdf;/Users/wasmer/Zotero/storage/2CCUFS87/152463.html}
}
@article{hegdeMachinelearnedApproximationsDensity2017,
title = {Machine-Learned Approximations to {{Density Functional Theory Hamiltonians}}},
author = {Hegde, Ganesh and Bowen, R. Chris},
......@@ -10061,6 +10126,24 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
file = {/Users/wasmer/Nextcloud/Zotero/Ko et al_2021_General-Purpose Machine Learning Potentials Capturing Nonlocal Charge Transfer2.pdf;/Users/wasmer/Zotero/storage/RLPWSGFJ/acs.accounts.html}
}
 
@article{kohnNobelLectureElectronic1999,
  title        = {Nobel {{Lecture}}: {{Electronic}} Structure of Matter---Wave Functions and Density Functionals},
  shorttitle   = {Nobel {{Lecture}}},
  author       = {Kohn, W.},
  date         = {1999-10-01},
  journaltitle = {Reviews of Modern Physics},
  shortjournal = {Rev. Mod. Phys.},
  volume       = {71},
  number       = {5},
  pages        = {1253--1266},
  publisher    = {American Physical Society},
  doi          = {10.1103/RevModPhys.71.1253},
  url          = {https://link.aps.org/doi/10.1103/RevModPhys.71.1253},
  urldate      = {2024-10-03},
  keywords     = {DFT,DFT theory,history of science,KS-DFT,Nobel prize,physics},
  file         = {/Users/wasmer/Nextcloud/Zotero/Kohn - 1999 - Nobel Lecture Electronic structure of matter---wave functions and density functionals.pdf;/Users/wasmer/Zotero/storage/QERFJ3F5/RevModPhys.71.html}
}
@article{kohnSelfConsistentEquationsIncluding1965,
title = {Self-{{Consistent Equations Including Exchange}} and {{Correlation Effects}}},
author = {Kohn, W.},
......@@ -11467,6 +11550,7 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
urldate = {2023-09-24},
abstract = {Kohn–Sham density functional theory (DFT) is the most widely used electronic structure theory. Despite significant progress in the past few decades, the numerical solution of Kohn–Sham DFT problems remains challenging, especially for large-scale systems. In this paper we review the basics as well as state-of-the-art numerical methods, and focus on the unique numerical challenges of DFT.},
langid = {english},
keywords = {DFT,DFT numerics,DFT theory,KS-DFT,numerical analysis,numerical methods,physics,Resolution of the identity},
file = {/Users/wasmer/Zotero/storage/9Z7XGB5F/Lin et al. - 2019 - Numerical methods for Kohn–Sham density functional.pdf}
}
 
......@@ -11834,6 +11918,24 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
file = {/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression.pdf;/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression2.pdf;/Users/wasmer/Zotero/storage/QNGQ9AQD/2212.html}
}
 
@article{louNeuralWaveFunctions2024,
title = {Neural {{Wave Functions}} for {{Superfluids}}},
author = {Lou, Wan Tong and Sutterud, Halvard and Cassella, Gino and Foulkes, W. M. C. and Knolle, Johannes and Pfau, David and Spencer, James S.},
date = {2024-05-22},
journaltitle = {Physical Review X},
shortjournal = {Phys. Rev. X},
volume = {14},
number = {2},
pages = {021030},
publisher = {American Physical Society},
doi = {10.1103/PhysRevX.14.021030},
url = {https://link.aps.org/doi/10.1103/PhysRevX.14.021030},
urldate = {2024-10-03},
abstract = {Understanding superfluidity remains a major goal of condensed matter physics. Here, we tackle this challenge utilizing the recently developed fermionic neural network (FermiNet) wave function Ansatz [D. Pfau et al., Phys. Rev. Res. 2, 033429 (2020).] for variational Monte Carlo calculations. We study the unitary Fermi gas, a system with strong, short-range, two-body interactions known to possess a superfluid ground state but difficult to describe quantitatively. We demonstrate key limitations of the FermiNet Ansatz in studying the unitary Fermi gas and propose a simple modification based on the idea of an antisymmetric geminal power singlet (AGPs) wave function. The new AGPs FermiNet outperforms the original FermiNet significantly in paired systems, giving results which are more accurate than fixed-node diffusion Monte Carlo and are consistent with experiment. We prove mathematically that the new Ansatz, which differs from the original Ansatz only by the method of antisymmetrization, is a strict generalization of the original FermiNet architecture, despite the use of fewer parameters. Our approach shares several advantages with the original FermiNet: The use of a neural network removes the need for an underlying basis set, and the flexibility of the network yields extremely accurate results within a variational quantum Monte Carlo framework that provides access to unbiased estimates of arbitrary ground-state expectation values. We discuss how the method can be extended to study other superfluids.},
keywords = {DeepMind,Fermi gas,FermiNet,ML-ESM,ML-QMBP,NIC,quantum materials,superfluidity,VMC},
file = {/Users/wasmer/Nextcloud/Zotero/Lou et al. - 2024 - Neural Wave Functions for Superfluids.pdf;/Users/wasmer/Zotero/storage/JG3MF88T/PhysRevX.14.html}
}
@thesis{lounisTheoryMagneticTransition2007,
title = {Theory of {{Magnetic Transition Metal Nanoclusters}} on {{Surfaces}}},
author = {Lounis, Samir},
......@@ -13449,6 +13551,23 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Nagaosa_Tokura_2013_Topological properties and dynamics of magnetic skyrmions.pdf}
}
 
@article{nakajimaCrystalStructureBi2Te3xSex1963,
title = {The Crystal Structure of {Bi$_2$Te$_{3-x}$Se$_x$}},
author = {Nakajima, Seizo},
date = {1963-03},
journaltitle = {Journal of Physics and Chemistry of Solids},
shortjournal = {J. Phys. Chem. Solids},
volume = {24},
number = {3},
pages = {479--485},
issn = {0022-3697},
doi = {10.1016/0022-3697(63)90207-5},
url = {https://linkinghub.elsevier.com/retrieve/pii/0022369763902075},
urldate = {2024-09-27},
langid = {english},
keywords = {/unread}
}
@article{nakataLargeScaleLinear2020,
title = {Large Scale and Linear Scaling {{DFT}} with the {{CONQUEST}} Code},
author = {Nakata, Ayako and Baker, Jack S. and Mujahed, Shereif Y. and Poulton, Jack T. L. and Arapan, Sergiu and Lin, Jianbo and Raza, Zamaan and Yadav, Sushma and Truflandier, Lionel and Miyazaki, Tsuyoshi and Bowler, David R.},
......@@ -14335,7 +14454,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2023_Transferable prediction of formation energy across lattices of increasing size.pdf}
}
 
@online{passaroReducingSO3Convolutions2023,
title = {Reducing {{SO}}(3) {{Convolutions}} to {{SO}}(2) for {{Efficient Equivariant GNNs}}},
author = {Passaro, Saro and Zitnick, C. Lawrence},
date = {2023-06-14},
......@@ -14347,7 +14466,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
urldate = {2024-05-07},
abstract = {Graph neural networks that model 3D data, such as point clouds or atoms, are typically desired to be \$SO(3)\$ equivariant, i.e., equivariant to 3D rotations. Unfortunately equivariant convolutions, which are a fundamental operation for equivariant networks, increase significantly in computational complexity as higher-order tensors are used. In this paper, we address this issue by reducing the \$SO(3)\$ convolutions or tensor products to mathematically equivalent convolutions in \$SO(2)\$ . This is accomplished by aligning the node embeddings' primary axis with the edge vectors, which sparsifies the tensor product and reduces the computational complexity from \$O(L\textasciicircum 6)\$ to \$O(L\textasciicircum 3)\$, where \$L\$ is the degree of the representation. We demonstrate the potential implications of this improvement by proposing the Equivariant Spherical Channel Network (eSCN), a graph neural network utilizing our novel approach to equivariant convolutions, which achieves state-of-the-art results on the large-scale OC-20 and OC-22 datasets.},
pubstate = {prepublished},
keywords = {alternative approaches,alternative for equivariance,AML,computational complexity,convolution,equivariant,equivariant alternative,eSCN,GNN,Meta Research,ML,MLP,MPNN,Open Catalyst,rotational symmetry,SO(3),tensor product,with-code},
file = {/Users/wasmer/Nextcloud/Zotero/Passaro_Zitnick_2023_Reducing SO(3) Convolutions to SO(2) for Efficient Equivariant GNNs2.pdf;/Users/wasmer/Zotero/storage/IIL5PCZ5/2302.html}
}
 
......@@ -14939,7 +15058,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
doi = {10.1073/pnas.0505436102},
url = {https://www.pnas.org/doi/full/10.1073/pnas.0505436102},
urldate = {2022-10-05},
keywords = {condensed matter,DFT,electronic structure,near-sightedness,NEM,original publication,physics},
file = {/Users/wasmer/Nextcloud/Zotero/Prodan_Kohn_2005_Nearsightedness of electronic matter.pdf}
}
 
......@@ -15233,6 +15352,25 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Reiser et al_2022_Graph neural networks for materials science and chemistry.pdf;/Users/wasmer/Zotero/storage/IVEGXDHZ/2208.html}
}
 
@article{remmeKineticNetDeepLearning2023,
title = {{{KineticNet}}: {{Deep}} Learning a Transferable Kinetic Energy Functional for Orbital-Free Density Functional Theory},
shorttitle = {{{KineticNet}}},
author = {Remme, R. and Kaczun, T. and Scheurer, M. and Dreuw, A. and Hamprecht, F. A.},
date = {2023-10-13},
journaltitle = {The Journal of Chemical Physics},
shortjournal = {J. Chem. Phys.},
volume = {159},
number = {14},
pages = {144113},
issn = {0021-9606},
doi = {10.1063/5.0158275},
url = {https://doi.org/10.1063/5.0158275},
urldate = {2024-10-03},
abstract = {Orbital-free density functional theory (OF-DFT) holds promise to compute ground state molecular properties at minimal cost. However, it has been held back by our inability to compute the kinetic energy as a functional of electron density alone. Here, we set out to learn the kinetic energy functional from ground truth provided by the more expensive Kohn–Sham density functional theory. Such learning is confronted with two key challenges: Giving the model sufficient expressivity and spatial context while limiting the memory footprint to afford computations on a GPU and creating a sufficiently broad distribution of training data to enable iterative density optimization even when starting from a poor initial guess. In response, we introduce KineticNet, an equivariant deep neural network architecture based on point convolutions adapted to the prediction of quantities on molecular quadrature grids. Important contributions include convolution filters with sufficient spatial resolution in the vicinity of nuclear cusp, an atom-centric sparse but expressive architecture that relays information across multiple bond lengths, and a new strategy to generate varied training data by finding ground state densities in the face of perturbations by a random external potential. KineticNet achieves, for the first time, chemical accuracy of the learned functionals across input densities and geometries of tiny molecules. For two-electron systems, we additionally demonstrate OF-DFT density optimization with chemical accuracy.},
keywords = {/unread,AML,Kinetic energy density functionals,ML,ML-DFT,ML-ESM,OF-DFT,prediction of kinetic energy},
file = {/Users/wasmer/Nextcloud/Zotero/Remme et al. - 2023 - KineticNet Deep learning a transferable kinetic energy functional for orbital-free density function.pdf;/Users/wasmer/Zotero/storage/WWVJWQTX/KineticNet-Deep-learning-a-transferable-kinetic.html}
}
@article{renLigandOptimizationExchange2022,
title = {Ligand {{Optimization}} of {{Exchange Interaction}} in {{Co}}({{II}}) {{Dimer Single Molecule Magnet}} by {{Machine Learning}}},
author = {Ren, Sijin and Fonseca, Eric and Perry, William and Cheng, Hai-Ping and Zhang, Xiao-Guang and Hennig, Richard G.},
......@@ -15557,6 +15695,24 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics},
file = {/Users/wasmer/Nextcloud/Zotero/Rupp et al_2012_Fast and Accurate Modeling of Molecular Atomization Energies with Machine.pdf;/Users/wasmer/Zotero/storage/AP7Y6JEW/PhysRevLett.108.html}
}
 
@article{ruppGuestEditorialSpecial2024,
title = {Guest Editorial: {{Special Topic}} on Software for Atomistic Machine Learning},
shorttitle = {Guest Editorial},
author = {Rupp, Matthias and Küçükbenli, Emine and Csányi, Gábor},
date = {2024-08-09},
journaltitle = {The Journal of Chemical Physics},
shortjournal = {J. Chem. Phys.},
volume = {161},
number = {6},
pages = {060401},
issn = {0021-9606},
doi = {10.1063/5.0228461},
url = {https://doi.org/10.1063/5.0228461},
urldate = {2024-09-30},
keywords = {/unread,AML,best-of-list,ML,review-of-AML,software},
file = {/Users/wasmer/Nextcloud/Zotero/Rupp et al. - 2024 - Guest editorial Special Topic on software for atomistic machine learning.pdf;/Users/wasmer/Zotero/storage/WDZTVFQU/Guest-editorial-Special-Topic-on-software-for.html}
}
@article{ruppMachineLearningQuantum2015,
title = {Machine Learning for Quantum Mechanics in a Nutshell},
author = {Rupp, Matthias},
......@@ -16242,6 +16398,23 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics},
file = {/Users/wasmer/Nextcloud/Zotero/Schmidt et al_2021_Crystal graph attention networks for the prediction of stable materials.pdf}
}
 
@article{schmidtImprovingMachinelearningModels2024,
title = {Improving Machine-Learning Models in Materials Science through Large Datasets},
author = {Schmidt, Jonathan and Cerqueira, Tiago F. T. and Romero, Aldo H. and Loew, Antoine and Jäger, Fabian and Wang, Hai-Chen and Botti, Silvana and Marques, Miguel A. L.},
date = {2024-11-01},
journaltitle = {Materials Today Physics},
shortjournal = {Mater. Today Phys.},
volume = {48},
pages = {101560},
issn = {2542-5293},
doi = {10.1016/j.mtphys.2024.101560},
url = {https://www.sciencedirect.com/science/article/pii/S2542529324002360},
urldate = {2024-10-03},
abstract = {The accuracy of a machine learning model is limited by the quality and quantity of the data available for its training and validation. This problem is particularly challenging in materials science, where large, high-quality, and consistent datasets are scarce. Here we present alexandria, an open database of more than 5 million density-functional theory calculations for periodic three-, two-, and one-dimensional compounds. We use this data to train machine learning models to reproduce seven different properties using both composition-based models and crystal-graph neural networks. In the majority of cases, the error of the models decreases monotonically with the training data, although some graph networks seem to saturate for large training set sizes. Differences in the training can be correlated with the statistical distribution of the different properties. We also observe that graph-networks, that have access to detailed geometrical information, yield in general more accurate models than simple composition-based methods. Finally, we assess several universal machine learning interatomic potentials. Crystal geometries optimised with these force fields are very high quality, but unfortunately the accuracy of the energies is still lacking. Furthermore, we observe some instabilities for regions of chemical space that are undersampled in the training sets used for these models. This study highlights the potential of large-scale, high-quality datasets to improve machine learning models in materials science.},
keywords = {/unread,2D material,Alexandria database,ALIGNN,AML,binary systems,CGAT,convex hull,crystal graph,dataset,large dataset,M3GNet,MACE,materials database,ML,n-ary alloys,OPTIMADE,PBE,quaternary systems,SCAN,ternary systems,universal potential,with-code,with-data},
file = {/Users/wasmer/Zotero/storage/A2DT6HKC/S2542529324002360.html}
}
@online{schmidtLargescaleMachinelearningassistedExploration2022,
title = {Large-Scale Machine-Learning-Assisted Exploration of the Whole Materials Space},
author = {Schmidt, Jonathan and Hoffmann, Noah and Wang, Hai-Chen and Borlido, Pedro and Carriço, Pedro J. M. A. and Cerqueira, Tiago F. T. and Botti, Silvana and Marques, Miguel A. L.},
......@@ -18167,6 +18340,23 @@ Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroe
file = {/Users/wasmer/Nextcloud/Zotero/Tyler et al_2023_Artificial Intelligence in Materials Education.pdf}
}
 
@online{uenoSpinMultiNetNeuralNetwork2024,
title = {{{SpinMultiNet}}: {{Neural Network Potential Incorporating Spin Degrees}} of {{Freedom}} with {{Multi-Task Learning}}},
shorttitle = {{{SpinMultiNet}}},
author = {Ueno, Koki and Ohuchi, Satoru and Ichikawa, Kazuhide and Amii, Kei and Wakasugi, Kensuke},
date = {2024-09-08},
eprint = {2409.03253},
eprinttype = {arXiv},
eprintclass = {cond-mat},
doi = {10.48550/arXiv.2409.03253},
url = {https://arxiv.org/abs/2409.03253},
urldate = {2024-09-23},
abstract = {Neural Network Potentials (NNPs) have attracted significant attention as a method for accelerating density functional theory (DFT) calculations. However, conventional NNP models typically do not incorporate spin degrees of freedom, limiting their applicability to systems where spin states critically influence material properties, such as transition metal oxides. This study introduces SpinMultiNet, a novel NNP model that integrates spin degrees of freedom through multi-task learning. SpinMultiNet achieves accurate predictions without relying on correct spin values obtained from DFT calculations. Instead, it utilizes initial spin estimates as input and leverages multi-task learning to optimize the spin latent representation while maintaining both \$E(3)\$ and time-reversal equivariance. Validation on a dataset of transition metal oxides demonstrates the high predictive accuracy of SpinMultiNet. The model successfully reproduces the energy ordering of stable spin configurations originating from superexchange interactions and accurately captures the rhombohedral distortion of the rocksalt structure. These results pave the way for new possibilities in materials simulations that consider spin degrees of freedom, promising future applications in large-scale simulations of various material systems, including magnetic materials.},
pubstate = {prepublished},
keywords = {AFM,AML,E(3),equivariant,FM,magnetism,ML,MLP,MPNN,multitask learning,NequIP,oxides,prediction of total energy,rec-by-katsumoto,spin-dependent,SpinGNN,transition metals,TRS,VASP},
file = {/Users/wasmer/Nextcloud/Zotero/Ueno et al. - 2024 - SpinMultiNet Neural Network Potential Incorporating Spin Degrees of Freedom with Multi-Task Learnin.pdf;/Users/wasmer/Zotero/storage/2T3H8XM6/2409.html}
}
@article{uhrinWorkflowsAiiDAEngineering2021,
title = {Workflows in {{AiiDA}}: {{Engineering}} a High-Throughput, Event-Based Engine for Robust and Modular Computational Workflows},
shorttitle = {Workflows in {{AiiDA}}},
......@@ -20329,6 +20519,15 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the
keywords = {/unread}
}
 
@unpublished{zellerYukawaPotentialKKRnano2024,
title = {Yukawa Potential in {{KKRnano}}},
author = {Zeller, Rudolf},
date = {2024},
location = {Forschungszentrum Jülich},
note = {Unpublished notes},
keywords = {/unread}
}
@book{zengQuantumInformationMeets2019,
title = {Quantum {{Information Meets Quantum Matter}}: {{From Quantum Entanglement}} to {{Topological Phases}} of {{Many-Body Systems}}},
shorttitle = {Quantum {{Information Meets Quantum Matter}}},
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment