Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • phd-project-wasmer/resources/latex-resources
1 result
Show changes
Commits on Source (5)
Showing
with 365 additions and 7 deletions
......@@ -300,6 +300,23 @@
file = {/Users/wasmer/Zotero/storage/KA8R9BT2/Aldarawsheh et al. - 2024 - Current-driven dynamics of antiferromagnetic skyrmions from skyrmion Hall effects to hybrid inter-s.pdf}
}
 
@article{alghadeerHighlyAccurateMachine2021,
  title = {Highly Accurate Machine Learning Model for Kinetic Energy Density Functional},
  author = {Alghadeer, Mohammed and Al-Aswad, Abdulaziz and Alharbi, Fahhad H.},
  date = {2021-10-29},
  journaltitle = {Physics Letters A},
  shortjournal = {Physics Letters A},
  volume = {414},
  pages = {127621},
  issn = {0375-9601},
  doi = {10.1016/j.physleta.2021.127621},
  url = {https://www.sciencedirect.com/science/article/pii/S0375960121004850},
  urldate = {2024-10-03},
  abstract = {Inspired by the remarkable ongoing progress of data-driven approaches, a very accurate predictive model is developed to estimate one-dimensional kinetic energy density functionals (KEDF) using Machine Learning (ML). Starting from possible analytical forms of kinetic energy density and by utilizing a variety of solvable models, a simple – yet highly – accurate linear regression model is statistically trained to estimate the kinetic energy as functionals of the density. The mean relative accuracy for even a small number of randomly generated potentials is found to be better than the standard KEDF (Thomas-Fermi (TF) and von Weizsäcker (vW)) by several orders of magnitudes. As more different potentials of model problems are mixed, the coefficients of the linear model significantly approach the known values of Thomas-Fermi and von Weizsäcker, suggesting the reliability of the statistical training approach. This work can provide an important step toward more accurate large-scale orbital free density functional theory (OF-DFT) calculations.},
  keywords = {AML,Kinetic energy density functionals,ML,ML-DFA,ML-DFT,OF-DFT},
  file = {/Users/wasmer/Nextcloud/Zotero/Alghadeer et al. - 2021 - Highly accurate machine learning model for kinetic energy density functional.pdf;/Users/wasmer/Zotero/storage/X6Y8KKF9/S0375960121004850.html}
}
@article{allenMachineLearningMaterial2022,
title = {Machine Learning of Material Properties: {{Predictive}} and Interpretable Multilinear Models},
shorttitle = {Machine Learning of Material Properties},
......@@ -462,7 +479,7 @@
abstract = {There is no doubt that the economic and computing activity related to the digital sector will ramp up faster in the present decade than in the last. Moreover, computing infrastructure is one of three major drivers of new electricity use alongsidefuture and current hydrogen production and battery electric vehicles charging. Here is proposed a trajectory in this decade for CO2 emissions associated with this digitalization and its share of electricity and energy generation as a whole. The roadmap for major sources of primary energy and electricity and associated CO2 emissions areprojected and connected to the probable power use of the digital industry. The truncation error for manufacturing related CO2 emissions may be 0.8 Gt or more indicating a larger share of manufacturing and absolute digital CO2 emissions.While remaining at a moderate share of global CO2 emissions (4-5\%), the resulting digital CO2 emissions will likely rise from 2020 to 2030. The opposite may only happen if the electricity used to run especially data centers and production plants is produced locally (next to the data centers and plants) from renewable sources and data intensity metrics grow slower than expected.},
langid = {english},
keywords = {ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,ICT sector,low-power electronics,world energy consumption},
file = {/Users/wasmer/Nextcloud/Zotero/Andrae_2020_Hypotheses for Primary Energy Use, Electricity Use and CΟ2 Emissions of Global.pdf}
file = {/Users/wasmer/Nextcloud/Zotero/Andrae_2020_Hypotheses for Primary Energy Use, Electricity Use and CO2 Emissions of Global.pdf}
}
 
@article{andrejevicMachineLearningSpectralIndicators2022,
......@@ -766,7 +783,6 @@ Subject\_term\_id: cheminformatics;computational-models;computational-science},
url = {https://pubs.rsc.org/en/content/articlelanding/2024/dd/d3dd00213f},
urldate = {2024-09-03},
langid = {english},
keywords = {/unread},
file = {/Users/wasmer/Nextcloud/Zotero/Back et al. - 2024 - Accelerated chemical science with AI.pdf}
}
 
......@@ -811,6 +827,19 @@ Subject\_term\_id: cheminformatics;computational-models;computational-science},
file = {/Users/wasmer/Nextcloud/Zotero/Bai et al_2022_Machine learning the Hohenberg-Kohn map for molecular excited states.pdf}
}
 
@online{bakerNobelPrizeChemistry2024,
  author = {Baker, David and Hassabis, Demis and Jumper, John M.},
  title = {The {{Nobel Prize}} in {{Chemistry}} 2024},
  date = {2024-10-09},
  organization = {NobelPrize.org},
  url = {https://www.nobelprize.org/prizes/chemistry/2024/summary/},
  urldate = {2024-10-09},
  abstract = {The Nobel Prize in Chemistry 2024 was awarded with one half to David Baker “for computational protein design” and the other half jointly to Demis Hassabis and John M. Jumper “for protein structure prediction”},
  langid = {american},
  keywords = {/unread,AI4Science,AlphaFold,AML,biomolecules,DeepMind,for introductions,ML,Nobel prize,Protein structure predictions},
  file = {/Users/wasmer/Nextcloud/Zotero/Baker et al. - 2024 - The Nobel Prize in Chemistry 2024_1.pdf;/Users/wasmer/Nextcloud/Zotero/Baker et al. - 2024 - The Nobel Prize in Chemistry 2024.pdf;/Users/wasmer/Zotero/storage/8V2RH9IV/summary.html}
}
@online{bakshiLearningQuantumHamiltonians2023,
title = {Learning Quantum {{Hamiltonians}} at Any Temperature in Polynomial Time},
author = {Bakshi, Ainesh and Liu, Allen and Moitra, Ankur and Tang, Ewin},
......@@ -3787,6 +3816,21 @@ Subject\_term\_id: computational-methods;electronic-structure;theory-and-computa
file = {/Users/wasmer/Nextcloud/Zotero/Crisostomo et al_2023_Seven Useful Questions in Density Functional Theory.pdf;/Users/wasmer/Zotero/storage/45PQRMPB/2207.html}
}
 
@article{csanyiSoftwareAtomisticMachine2024,
  title = {Software for {{Atomistic Machine Learning}}},
  editor = {Csányi, Gábor and Rupp, Matthias and Küçükbenli, Emine and Manolopoulos, David E. and Ceriotti, Michele and Michaelides, Angelos and Sherrill, C. David},
  date = {2024-08-09},
  journaltitle = {The Journal of Chemical Physics},
  volume = {161},
  number = {6},
  url = {https://pubs.aip.org/collection/1349/Software-for-Atomistic-Machine-Learning},
  urldate = {2024-09-30},
  abstract = {The application of machine-learning techniques to atomistic modeling of physics, chemistry and materials science is blooming, and machine learning is becoming a},
  langid = {english},
  keywords = {/unread,AML,collection,ML,review-of-AML,software,special issue},
  file = {/Users/wasmer/Zotero/storage/V88JWUZY/Software-for-Atomistic-Machine-Learning.html}
}
@article{cuevas-zuviriaAnalyticalModelElectron2020,
title = {Analytical {{Model}} of {{Electron Density}} and {{Its Machine Learning Inference}}},
author = {Cuevas-Zuviría, Bruno and Pacios, Luis F.},
......@@ -5418,7 +5462,7 @@ Junqi Yin\\
url = {https://pubs.rsc.org/en/content/articlelanding/2019/sc/c9sc02696g},
urldate = {2021-10-16},
langid = {english},
keywords = {library,ML,ML-DFT,ML-ESM,prediction of electron density,with-code},
keywords = {library,ML,ML-DFT,ML-ESM,prediction of electron density,Resolution of the identity,with-code},
file = {/Users/wasmer/Nextcloud/Zotero/Fabrizio et al_2019_Electron density learning of non-covalent systems.pdf}
}
 
......@@ -6031,6 +6075,23 @@ Junqi Yin\\
file = {/Users/wasmer/Nextcloud/Zotero/Freitag et al_2021_The real climate and transformative impact of ICT.pdf;/Users/wasmer/Zotero/storage/3IPQYR9I/S2666389921001884.html}
}
 
@article{freyHighthroughputSearchMagnetic2020,
  author = {Frey, Nathan C. and Horton, Matthew K. and Munro, Jason M. and Griffin, Sinéad M. and Persson, Kristin A. and Shenoy, Vivek B.},
  title = {High-Throughput Search for Magnetic and Topological Order in Transition Metal Oxides},
  date = {2020-12-09},
  journaltitle = {Science Advances},
  volume = {6},
  number = {50},
  pages = {eabd1076},
  publisher = {American Association for the Advancement of Science},
  doi = {10.1126/sciadv.abd1076},
  url = {https://www.science.org/doi/10.1126/sciadv.abd1076},
  urldate = {2024-10-08},
  abstract = {The discovery of intrinsic magnetic topological order in MnBi2Te4 has invigorated the search for materials with coexisting magnetic and topological phases. These multiorder quantum materials are expected to exhibit new topological phases that can be tuned with magnetic fields, but the search for such materials is stymied by difficulties in predicting magnetic structure and stability. Here, we compute more than 27,000 unique magnetic orderings for more than 3000 transition metal oxides in the Materials Project database to determine their magnetic ground states and estimate their effective exchange parameters and critical temperatures. We perform a high-throughput band topology analysis of centrosymmetric magnetic materials, calculate topological invariants, and identify 18 new candidate ferromagnetic topological semimetals, axion insulators, and antiferromagnetic topological insulators. To accelerate future efforts, machine learning classifiers are trained to predict both magnetic ground states and magnetic topological order without requiring first-principles calculations.},
  keywords = {/unread,AML,HTC,magnetic topological materials,magnetism,materials discovery,ML,oxides,scikit-learn,topological,topological insulator,transition metals},
  file = {/Users/wasmer/Nextcloud/Zotero/Frey et al. - 2020 - High-throughput search for magnetic and topological order in transition metal oxides.pdf}
}
@article{freyMachineLearningEnabledDesign2020,
title = {Machine {{Learning-Enabled Design}} of {{Point Defects}} in {{2D Materials}} for {{Quantum}} and {{Neuromorphic Information Processing}}},
author = {Frey, Nathan C. and Akinwande, Deji and Jariwala, Deep and Shenoy, Vivek B.},
......@@ -6244,6 +6305,22 @@ Subject\_term: Quantum physics, Publishing, Peer review},
file = {/Users/wasmer/Zotero/storage/D55696WS/acs.chemmater.html}
}
 
@online{fuRecipeChargeDensity2024,
  author = {Fu, Xiang and Rosen, Andrew and Bystrom, Kyle and Wang, Rui and Musaelian, Albert and Kozinsky, Boris and Smidt, Tess and Jaakkola, Tommi},
  title = {A {{Recipe}} for {{Charge Density Prediction}}},
  date = {2024-05-29},
  eprint = {2405.19276},
  eprinttype = {arXiv},
  eprintclass = {physics},
  doi = {10.48550/arXiv.2405.19276},
  url = {http://arxiv.org/abs/2405.19276},
  urldate = {2024-09-24},
  abstract = {In density functional theory, charge density is the core attribute of atomic systems from which all chemical properties can be derived. Machine learning methods are promising in significantly accelerating charge density prediction, yet existing approaches either lack accuracy or scalability. We propose a recipe that can achieve both. In particular, we identify three key ingredients: (1) representing the charge density with atomic and virtual orbitals (spherical fields centered at atom/virtual coordinates); (2) using expressive and learnable orbital basis sets (basis function for the spherical fields); and (3) using high-capacity equivariant neural network architecture. Our method achieves state-of-the-art accuracy while being more than an order of magnitude faster than existing methods. Furthermore, our method enables flexible efficiency-accuracy trade-offs by adjusting the model/basis sizes.},
  pubstate = {prepublished},
  keywords = {AML,basis set,equivariant,eSCN,ML,ML-Density,ML-DFT,ML-ESM,molecules,prediction of electron density,QM9},
  file = {/Users/wasmer/Nextcloud/Zotero/Fu et al. - 2024 - A Recipe for Charge Density Prediction.pdf;/Users/wasmer/Zotero/storage/YSSBDXWN/2405.html}
}
@article{furnessAccurateNumericallyEfficient2020,
title = {Accurate and {{Numerically Efficient r2SCAN Meta-Generalized Gradient Approximation}}},
author = {Furness, James W. and Kaplan, Aaron D. and Ning, Jinliang and Perdew, John P. and Sun, Jianwei},
......@@ -7701,6 +7778,24 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation},
file = {/Users/wasmer/Nextcloud/Zotero/Hazra et al_2024_Predicting the One-Particle Density Matrix with Machine Learning2.pdf}
}
 
@incollection{head-gordonTensorsElectronicStructure2000,
  title = {Tensors in {{Electronic Structure Theory}}: {{Basic Concepts}} and {{Applications}} to {{Electron Correlation Models}}},
  booktitle = {Modern {{Methods}} and {{Algorithms}} of {{Quantum Chemistry}}},
  author = {Head-Gordon, Martin and Lee, Michael and family=Voorhis, given=Troy, prefix=van, useprefix=false and Maslen, Paul},
  date = {2000},
  series = {{{NIC}} Series},
  volume = {3},
  number = {FZJ-2014-02065},
  publisher = {John von Neumann Institute for Computing},
  location = {Jülich},
  url = {http://hdl.handle.net/2128/6058},
  urldate = {2024-10-03},
  isbn = {978-3-00-005834-9},
  langid = {english},
  file = {/Users/wasmer/Nextcloud/Zotero/Head-Gordon et al. - 2000 - Tensors in Electronic Structure TheoryBasic Concepts and Applications to Electron Correlation Model.pdf;/Users/wasmer/Zotero/storage/2CCUFS87/152463.html}
}
@article{hegdeMachinelearnedApproximationsDensity2017,
title = {Machine-Learned Approximations to {{Density Functional Theory Hamiltonians}}},
author = {Hegde, Ganesh and Bowen, R. Chris},
......@@ -8376,6 +8471,36 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation},
file = {/Users/wasmer/Nextcloud/Zotero/Hoogeboom et al_2022_Equivariant Diffusion for Molecule Generation in 3D.pdf;/Users/wasmer/Zotero/storage/K6KWYTSV/2203.html}
}
 
@article{hopfieldNeuralNetworksPhysical1982,
  title = {Neural Networks and Physical Systems with Emergent Collective Computational Abilities},
  author = {Hopfield, J. J.},
  date = {1982-04},
  journaltitle = {Proceedings of the National Academy of Sciences},
  volume = {79},
  number = {8},
  pages = {2554--2558},
  publisher = {National Academy of Sciences},
  doi = {10.1073/pnas.79.8.2554},
  url = {https://www.pnas.org/doi/abs/10.1073/pnas.79.8.2554},
  urldate = {2024-10-08},
  abstract = {Computational properties of use of biological organisms or to the construction of computers can emerge as collective properties of systems having a large number of simple equivalent components (or neurons). The physical meaning of content-addressable memory is described by an appropriate phase space flow of the state of a system. A model of such a system is given, based on aspects of neurobiology but readily adapted to integrated circuits. The collective properties of this model produce a content-addressable memory which correctly yields an entire memory from any subpart of sufficient size. The algorithm for the time evolution of the state of the system is based on asynchronous parallel processing. Additional emergent collective properties include some capacity for generalization, familiarity recognition, categorization, error correction, and time sequence retention. The collective properties are only weakly sensitive to details of the modeling or the failure of individual devices.},
  keywords = {/unread},
  file = {/Users/wasmer/Nextcloud/Zotero/Hopfield - 1982 - Neural networks and physical systems with emergent collective computational abilities..pdf}
}
@online{hopfieldNobelPrizePhysics2024,
  author = {Hopfield, John and Hinton, Geoffrey},
  title = {The {{Nobel Prize}} in {{Physics}} 2024},
  date = {2024-10-08},
  organization = {NobelPrize.org},
  url = {https://www.nobelprize.org/prizes/physics/2024/summary/},
  urldate = {2024-10-08},
  abstract = {The Nobel Prize in Physics 2024 was awarded to John J. Hopfield and Geoffrey E. Hinton “for foundational discoveries and inventions that enable machine learning with artificial neural networks”},
  langid = {american},
  keywords = {AI4Science,Deep learning,for introductions,General ML,Hopfield network,ML,neural network,Nobel prize,RBM,spin models,statistical physics},
  file = {/Users/wasmer/Nextcloud/Zotero/Hopfield and Hinton - 2024 - The Nobel Prize in Physics 2024_1.pdf;/Users/wasmer/Nextcloud/Zotero/Hopfield and Hinton - 2024 - The Nobel Prize in Physics 2024.pdf;/Users/wasmer/Zotero/storage/7KYN3SJY/summary.html}
}
@inproceedings{horschEuropeanStandardizationEfforts2023,
title = {European Standardization Efforts from {{FAIR}} toward Explainable-{{AI-ready}} Data Documentation in Materials Modelling},
booktitle = {2023 3rd {{International Conference}} on {{Applied Artificial Intelligence}} ({{ICAPAI}})},
......@@ -8613,6 +8738,24 @@ Subject\_term\_id: computational-methods;research-management},
file = {/Users/wasmer/Nextcloud/Zotero/Huh et al_2024_The Platonic Representation Hypothesis.pdf;/Users/wasmer/Zotero/storage/R7C9FVMV/2405.html}
}
 
@article{hungUniversalEnsembleEmbeddingGraph2024,
  title = {Universal {{Ensemble-Embedding Graph Neural Network}} for {{Direct Prediction}} of {{Optical Spectra}} from {{Crystal Structures}}},
  author = {Hung, Nguyen Tuan and Okabe, Ryotaro and Chotrattanapituk, Abhijatmedhi and Li, Mingda},
  date = {2024-09-12},
  journaltitle = {Advanced Materials},
  pages = {2409175},
  issn = {1521-4095},
  doi = {10.1002/adma.202409175},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/adma.202409175},
  urldate = {2024-10-08},
  abstract = {Optical properties in solids, such as refractive index and absorption, hold vast applications ranging from solar panels to sensors, photodetectors, and transparent displays. However, first-principles computation of optical properties from crystal structures is a complex task due to the high convergence criteria and computational cost. Recent progress in machine learning shows promise in predicting material properties, yet predicting optical properties from crystal structures remains challenging due to the lack of efficient atomic embeddings. Here, Graph Neural Network for Optical spectra prediction (GNNOpt) is introduced, an equivariant graph-neural-network architecture featuring universal embedding with automatic optimization. This enables high-quality optical predictions with a dataset of only 944 materials. GNNOpt predicts all optical properties based on the Kramers-Krönig relations, including absorption coefficient, complex dielectric function, complex refractive index, and reflectance. The trained model is applied to screen photovoltaic materials based on spectroscopic limited maximum efficiency and search for quantum materials based on quantum weight. First-principles calculations validate the efficacy of the GNNOpt model, demonstrating excellent agreement in predicting the optical spectra of unseen materials. The discovery of new quantum materials with high predicted quantum weight, such as SiOs, which host exotic quasiparticles with multifold nontrivial topology, demonstrates the potential of GNNOpt in predicting optical properties across a broad range of materials and applications.},
  langid = {english},
  keywords = {/unread,AML,energy materials,equivariant,GNN,kramers-krönig relations,ML,photovoltaics,prediction of optical spectra,quantum materials},
  file = {/Users/wasmer/Nextcloud/Zotero/Hung et al. - Universal Ensemble-Embedding Graph Neural Network for Direct Prediction of Optical Spectra from Crys.pdf;/Users/wasmer/Zotero/storage/JVQULL49/adma.html}
}
@online{huOGBLSCLargeScaleChallenge2021,
title = {{{OGB-LSC}}: {{A Large-Scale Challenge}} for {{Machine Learning}} on {{Graphs}}},
shorttitle = {{{OGB-LSC}}},
......@@ -10061,6 +10204,24 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
file = {/Users/wasmer/Nextcloud/Zotero/Ko et al_2021_General-Purpose Machine Learning Potentials Capturing Nonlocal Charge Transfer2.pdf;/Users/wasmer/Zotero/storage/RLPWSGFJ/acs.accounts.html}
}
 
@article{kohnNobelLectureElectronic1999,
  author = {Kohn, W.},
  title = {Nobel {{Lecture}}: {{Electronic}} Structure of Matter---Wave Functions and Density Functionals},
  shorttitle = {Nobel {{Lecture}}},
  date = {1999-10-01},
  journaltitle = {Reviews of Modern Physics},
  shortjournal = {Rev. Mod. Phys.},
  volume = {71},
  number = {5},
  pages = {1253--1266},
  publisher = {American Physical Society},
  doi = {10.1103/RevModPhys.71.1253},
  url = {https://link.aps.org/doi/10.1103/RevModPhys.71.1253},
  urldate = {2024-10-03},
  keywords = {DFT,DFT theory,history of science,KS-DFT,Nobel prize,physics},
  file = {/Users/wasmer/Nextcloud/Zotero/Kohn - 1999 - Nobel Lecture Electronic structure of matter---wave functions and density functionals.pdf;/Users/wasmer/Zotero/storage/QERFJ3F5/RevModPhys.71.html}
}
@article{kohnSelfConsistentEquationsIncluding1965,
title = {Self-{{Consistent Equations Including Exchange}} and {{Correlation Effects}}},
author = {Kohn, W.},
......@@ -10422,6 +10583,23 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
file = {/Users/wasmer/Nextcloud/Zotero/Kreuzberger et al_2023_Machine Learning Operations (MLOps).pdf;/Users/wasmer/Zotero/storage/AGAJG2J6/10081336.html}
}
 
@article{krizhevskyImageNetClassificationDeep2017,
  title = {{{ImageNet}} Classification with Deep Convolutional Neural Networks},
  author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E.},
  date = {2017-05-24},
  journaltitle = {Communications of the ACM},
  shortjournal = {Commun. ACM},
  volume = {60},
  number = {6},
  pages = {84--90},
  issn = {0001-0782},
  doi = {10.1145/3065386},
  url = {https://dl.acm.org/doi/10.1145/3065386},
  urldate = {2024-10-08},
  abstract = {We trained a large, deep convolutional neural network to classify the 1.2 million high-resolution images in the ImageNet LSVRC-2010 contest into the 1000 different classes. On the test data, we achieved top-1 and top-5 error rates of 37.5\% and 17.0\%, respectively, which is considerably better than the previous state-of-the-art. The neural network, which has 60 million parameters and 650,000 neurons, consists of five convolutional layers, some of which are followed by max-pooling layers, and three fully connected layers with a final 1000-way softmax. To make training faster, we used non-saturating neurons and a very efficient GPU implementation of the convolution operation. To reduce overfitting in the fully connected layers we employed a recently developed regularization method called "dropout" that proved to be very effective. We also entered a variant of this model in the ILSVRC-2012 competition and achieved a winning top-5 test error rate of 15.3\%, compared to 26.2\% achieved by the second-best entry.},
  keywords = {/unread},
  file = {/Users/wasmer/Nextcloud/Zotero/Krizhevsky et al. - 2017 - ImageNet classification with deep convolutional neural networks.pdf}
}
@book{kronmullerHandbookMagnetismAdvanced2007,
title = {Handbook of {{Magnetism}} and {{Advanced Magnetic Materials}}},
editor = {Kronmüller, Helmut and Parkin, Stuart},
......@@ -11467,6 +11645,7 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
urldate = {2023-09-24},
abstract = {Kohn–Sham density functional theory (DFT) is the most widely used electronic structure theory. Despite significant progress in the past few decades, the numerical solution of Kohn–Sham DFT problems remains challenging, especially for large-scale systems. In this paper we review the basics as well as state-of-the-art numerical methods, and focus on the unique numerical challenges of DFT.},
langid = {english},
keywords = {DFT,DFT numerics,DFT theory,KS-DFT,numerical analysis,numerical methods,physics,Resolution of the identity},
file = {/Users/wasmer/Zotero/storage/9Z7XGB5F/Lin et al. - 2019 - Numerical methods for Kohn–Sham density functional.pdf}
}
 
......@@ -11834,6 +12013,24 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo
file = {/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression.pdf;/Users/wasmer/Nextcloud/Zotero/Lopanitsyna et al_2022_Modeling high-entropy transition-metal alloys with alchemical compression2.pdf;/Users/wasmer/Zotero/storage/QNGQ9AQD/2212.html}
}
 
@article{louNeuralWaveFunctions2024,
  author = {Lou, Wan Tong and Sutterud, Halvard and Cassella, Gino and Foulkes, W. M. C. and Knolle, Johannes and Pfau, David and Spencer, James S.},
  title = {Neural {{Wave Functions}} for {{Superfluids}}},
  date = {2024-05-22},
  journaltitle = {Physical Review X},
  shortjournal = {Phys. Rev. X},
  volume = {14},
  number = {2},
  pages = {021030},
  publisher = {American Physical Society},
  doi = {10.1103/PhysRevX.14.021030},
  url = {https://link.aps.org/doi/10.1103/PhysRevX.14.021030},
  urldate = {2024-10-03},
  abstract = {Understanding superfluidity remains a major goal of condensed matter physics. Here, we tackle this challenge utilizing the recently developed fermionic neural network (FermiNet) wave function Ansatz [D. Pfau et al., Phys. Rev. Res. 2, 033429 (2020).] for variational Monte Carlo calculations. We study the unitary Fermi gas, a system with strong, short-range, two-body interactions known to possess a superfluid ground state but difficult to describe quantitatively. We demonstrate key limitations of the FermiNet Ansatz in studying the unitary Fermi gas and propose a simple modification based on the idea of an antisymmetric geminal power singlet (AGPs) wave function. The new AGPs FermiNet outperforms the original FermiNet significantly in paired systems, giving results which are more accurate than fixed-node diffusion Monte Carlo and are consistent with experiment. We prove mathematically that the new Ansatz, which differs from the original Ansatz only by the method of antisymmetrization, is a strict generalization of the original FermiNet architecture, despite the use of fewer parameters. Our approach shares several advantages with the original FermiNet: The use of a neural network removes the need for an underlying basis set; sand the flexibility of the network yields extremely accurate results within a variational quantum Monte Carlo framework that provides access to unbiased estimates of arbitrary ground-state expectation values. We discuss how the method can be extended to study other superfluid.},
  keywords = {DeepMind,Fermi gas,FermiNet,ML-ESM,ML-QMBP,NIC,quantum materials,superfluidity,VMC},
  file = {/Users/wasmer/Nextcloud/Zotero/Lou et al. - 2024 - Neural Wave Functions for Superfluids.pdf;/Users/wasmer/Zotero/storage/JG3MF88T/PhysRevX.14.html}
}
@thesis{lounisTheoryMagneticTransition2007,
title = {Theory of {{Magnetic Transition Metal Nanoclusters}} on {{Surfaces}}},
author = {Lounis, Samir},
......@@ -13449,6 +13646,23 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Nagaosa_Tokura_2013_Topological properties and dynamics of magnetic skyrmions.pdf}
}
 
@article{nakajimaCrystalStructureBi2Te3xSex1963,
  title = {The Crystal Structure of {Bi$_2$Te$_{3-x}$Se$_x$}},
  author = {Nakajima, Seizo},
  date = {1963-03},
  journaltitle = {Journal of Physics and Chemistry of Solids},
  shortjournal = {Journal of Physics and Chemistry of Solids},
  volume = {24},
  number = {3},
  pages = {479--485},
  issn = {0022-3697},
  doi = {10.1016/0022-3697(63)90207-5},
  url = {https://linkinghub.elsevier.com/retrieve/pii/0022369763902075},
  urldate = {2024-09-27},
  langid = {english},
  keywords = {/unread}
}
@article{nakataLargeScaleLinear2020,
title = {Large Scale and Linear Scaling {{DFT}} with the {{CONQUEST}} Code},
author = {Nakata, Ayako and Baker, Jack S. and Mujahed, Shereif Y. and Poulton, Jack T. L. and Arapan, Sergiu and Lin, Jianbo and Raza, Zamaan and Yadav, Sushma and Truflandier, Lionel and Miyazaki, Tsuyoshi and Bowler, David R.},
......@@ -14335,7 +14549,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2023_Transferable prediction of formation energy across lattices of increasing size.pdf}
}
 
@online{passaroReducingConvolutionsEfficient2023,
@online{passaroReducingSO3Convolutions2023,
title = {Reducing {{SO}}(3) {{Convolutions}} to {{SO}}(2) for {{Efficient Equivariant GNNs}}},
author = {Passaro, Saro and Zitnick, C. Lawrence},
date = {2023-06-14},
......@@ -14347,7 +14561,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
urldate = {2024-05-07},
abstract = {Graph neural networks that model 3D data, such as point clouds or atoms, are typically desired to be \$SO(3)\$ equivariant, i.e., equivariant to 3D rotations. Unfortunately equivariant convolutions, which are a fundamental operation for equivariant networks, increase significantly in computational complexity as higher-order tensors are used. In this paper, we address this issue by reducing the \$SO(3)\$ convolutions or tensor products to mathematically equivalent convolutions in \$SO(2)\$ . This is accomplished by aligning the node embeddings' primary axis with the edge vectors, which sparsifies the tensor product and reduces the computational complexity from \$O(L\textasciicircum 6)\$ to \$O(L\textasciicircum 3)\$, where \$L\$ is the degree of the representation. We demonstrate the potential implications of this improvement by proposing the Equivariant Spherical Channel Network (eSCN), a graph neural network utilizing our novel approach to equivariant convolutions, which achieves state-of-the-art results on the large-scale OC-20 and OC-22 datasets.},
pubstate = {prepublished},
keywords = {/unread,alternative approaches,alternative for equivariance,AML,computational complexity,convolution,equivariant,equivariant alternative,eSCN,GNN,Meta Research,ML,MLP,MPNN,Open Catalyst,rotational symmetry,SO(3),tensor product,with-code},
keywords = {alternative approaches,alternative for equivariance,AML,computational complexity,convolution,equivariant,equivariant alternative,eSCN,GNN,Meta Research,ML,MLP,MPNN,Open Catalyst,rotational symmetry,SO(3),tensor product,with-code},
file = {/Users/wasmer/Nextcloud/Zotero/Passaro_Zitnick_2023_Reducing SO(3) Convolutions to SO(2) for Efficient Equivariant GNNs2.pdf;/Users/wasmer/Zotero/storage/IIL5PCZ5/2302.html}
}
 
......@@ -14939,7 +15153,7 @@ Subject\_term\_id: magnetic-properties-and-materials},
doi = {10.1073/pnas.0505436102},
url = {https://www.pnas.org/doi/full/10.1073/pnas.0505436102},
urldate = {2022-10-05},
keywords = {condensed matter,electronic structure,near-sightedness,NEM,original publication,physics},
keywords = {condensed matter,DFT,electronic structure,near-sightedness,NEM,original publication,physics},
file = {/Users/wasmer/Nextcloud/Zotero/Prodan_Kohn_2005_Nearsightedness of electronic matter.pdf}
}
 
......@@ -15233,6 +15447,25 @@ Subject\_term\_id: magnetic-properties-and-materials},
file = {/Users/wasmer/Nextcloud/Zotero/Reiser et al_2022_Graph neural networks for materials science and chemistry.pdf;/Users/wasmer/Zotero/storage/IVEGXDHZ/2208.html}
}
 
@article{remmeKineticNetDeepLearning2023,
  author = {Remme, R. and Kaczun, T. and Scheurer, M. and Dreuw, A. and Hamprecht, F. A.},
  title = {{{KineticNet}}: {{Deep}} Learning a Transferable Kinetic Energy Functional for Orbital-Free Density Functional Theory},
  shorttitle = {{{KineticNet}}},
  date = {2023-10-13},
  journaltitle = {The Journal of Chemical Physics},
  shortjournal = {The Journal of Chemical Physics},
  volume = {159},
  number = {14},
  pages = {144113},
  issn = {0021-9606},
  doi = {10.1063/5.0158275},
  url = {https://doi.org/10.1063/5.0158275},
  urldate = {2024-10-03},
  abstract = {Orbital-free density functional theory (OF-DFT) holds promise to compute ground state molecular properties at minimal cost. However, it has been held back by our inability to compute the kinetic energy as a functional of electron density alone. Here, we set out to learn the kinetic energy functional from ground truth provided by the more expensive Kohn–Sham density functional theory. Such learning is confronted with two key challenges: Giving the model sufficient expressivity and spatial context while limiting the memory footprint to afford computations on a GPU and creating a sufficiently broad distribution of training data to enable iterative density optimization even when starting from a poor initial guess. In response, we introduce KineticNet, an equivariant deep neural network architecture based on point convolutions adapted to the prediction of quantities on molecular quadrature grids. Important contributions include convolution filters with sufficient spatial resolution in the vicinity of nuclear cusp, an atom-centric sparse but expressive architecture that relays information across multiple bond lengths, and a new strategy to generate varied training data by finding ground state densities in the face of perturbations by a random external potential. KineticNet achieves, for the first time, chemical accuracy of the learned functionals across input densities and geometries of tiny molecules. For two-electron systems, we additionally demonstrate OF-DFT density optimization with chemical accuracy.},
  keywords = {/unread,AML,Kinetic energy density functionals,ML,ML-DFT,ML-ESM,OF-DFT,prediction of kinetic energy},
  file = {/Users/wasmer/Nextcloud/Zotero/Remme et al. - 2023 - KineticNet Deep learning a transferable kinetic energy functional for orbital-free density function.pdf;/Users/wasmer/Zotero/storage/WWVJWQTX/KineticNet-Deep-learning-a-transferable-kinetic.html}
}
@article{renLigandOptimizationExchange2022,
title = {Ligand {{Optimization}} of {{Exchange Interaction}} in {{Co}}({{II}}) {{Dimer Single Molecule Magnet}} by {{Machine Learning}}},
author = {Ren, Sijin and Fonseca, Eric and Perry, William and Cheng, Hai-Ping and Zhang, Xiao-Guang and Hennig, Richard G.},
......@@ -15557,6 +15790,24 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics},
file = {/Users/wasmer/Nextcloud/Zotero/Rupp et al_2012_Fast and Accurate Modeling of Molecular Atomization Energies with Machine.pdf;/Users/wasmer/Zotero/storage/AP7Y6JEW/PhysRevLett.108.html}
}
 
@article{ruppGuestEditorialSpecial2024,
  author       = {Rupp, Matthias and Küçükbenli, Emine and Csányi, Gábor},
  title        = {Guest Editorial: {{Special Topic}} on Software for Atomistic Machine Learning},
  shorttitle   = {Guest Editorial},
  journaltitle = {The Journal of Chemical Physics},
  shortjournal = {The Journal of Chemical Physics},
  date         = {2024-08-09},
  volume       = {161},
  number       = {6},
  pages        = {060401},
  issn         = {0021-9606},
  doi          = {10.1063/5.0228461},
  url          = {https://doi.org/10.1063/5.0228461},
  urldate      = {2024-09-30},
  keywords     = {/unread,AML,best-of-list,ML,review-of-AML,software},
  file         = {/Users/wasmer/Nextcloud/Zotero/Rupp et al. - 2024 - Guest editorial Special Topic on software for atomistic machine learning.pdf;/Users/wasmer/Zotero/storage/WDZTVFQU/Guest-editorial-Special-Topic-on-software-for.html}
}
@article{ruppMachineLearningQuantum2015,
title = {Machine Learning for Quantum Mechanics in a Nutshell},
author = {Rupp, Matthias},
......@@ -16242,6 +16493,23 @@ Subject\_term\_id: condensed-matter-physics;history;quantum-physics},
file = {/Users/wasmer/Nextcloud/Zotero/Schmidt et al_2021_Crystal graph attention networks for the prediction of stable materials.pdf}
}
 
@article{schmidtImprovingMachinelearningModels2024,
  author       = {Schmidt, Jonathan and Cerqueira, Tiago F. T. and Romero, Aldo H. and Loew, Antoine and Jäger, Fabian and Wang, Hai-Chen and Botti, Silvana and Marques, Miguel A. L.},
  title        = {Improving Machine-Learning Models in Materials Science through Large Datasets},
  journaltitle = {Materials Today Physics},
  shortjournal = {Materials Today Physics},
  date         = {2024-11-01},
  volume       = {48},
  pages        = {101560},
  issn         = {2542-5293},
  doi          = {10.1016/j.mtphys.2024.101560},
  url          = {https://www.sciencedirect.com/science/article/pii/S2542529324002360},
  urldate      = {2024-10-03},
  abstract     = {The accuracy of a machine learning model is limited by the quality and quantity of the data available for its training and validation. This problem is particularly challenging in materials science, where large, high-quality, and consistent datasets are scarce. Here we present alexandria, an open database of more than 5 million density-functional theory calculations for periodic three-, two-, and one-dimensional compounds. We use this data to train machine learning models to reproduce seven different properties using both composition-based models and crystal-graph neural networks. In the majority of cases, the error of the models decreases monotonically with the training data, although some graph networks seem to saturate for large training set sizes. Differences in the training can be correlated with the statistical distribution of the different properties. We also observe that graph-networks, that have access to detailed geometrical information, yield in general more accurate models than simple composition-based methods. Finally, we assess several universal machine learning interatomic potentials. Crystal geometries optimised with these force fields are very high quality, but unfortunately the accuracy of the energies is still lacking. Furthermore, we observe some instabilities for regions of chemical space that are undersampled in the training sets used for these models. This study highlights the potential of large-scale, high-quality datasets to improve machine learning models in materials science.},
  keywords     = {/unread,2D material,Alexandria database,ALIGNN,AML,binary systems,CGAT,convex hull,crystal graph,dataset,large dataset,M3GNet,MACE,materials database,ML,n-ary alloys,OPTIMADE,PBE,quaternary systems,SCAN,ternary systems,universal potential,with-code,with-data},
  file         = {/Users/wasmer/Zotero/storage/A2DT6HKC/S2542529324002360.html}
}
@online{schmidtLargescaleMachinelearningassistedExploration2022,
title = {Large-Scale Machine-Learning-Assisted Exploration of the Whole Materials Space},
author = {Schmidt, Jonathan and Hoffmann, Noah and Wang, Hai-Chen and Borlido, Pedro and Carriço, Pedro J. M. A. and Cerqueira, Tiago F. T. and Botti, Silvana and Marques, Miguel A. L.},
......@@ -16622,6 +16890,26 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien
file = {/Users/wasmer/Nextcloud/Zotero/Shapeev_2017_Accurate representation of formation energies of crystalline alloys with many.pdf;/Users/wasmer/Zotero/storage/EQYE3F3F/S0927025617303610.html}
}
 
@article{sharmaQuantumaccurateMachineLearning2024,
  author       = {Sharma, Abhishek and Sanvito, Stefano},
  title        = {Quantum-Accurate Machine Learning Potentials for Metal-Organic Frameworks Using Temperature Driven Active Learning},
  journaltitle = {npj Computational Materials},
  shortjournal = {npj Comput Mater},
  date         = {2024-10-08},
  volume       = {10},
  number       = {1},
  pages        = {1--13},
  publisher    = {Nature Publishing Group},
  issn         = {2057-3960},
  doi          = {10.1038/s41524-024-01427-y},
  url          = {https://www.nature.com/articles/s41524-024-01427-y},
  urldate      = {2024-10-08},
  abstract     = {Understanding structural flexibility of metal-organic frameworks (MOFs) via molecular dynamics simulations is crucial to design better MOFs. Density functional theory (DFT) and quantum-chemistry methods provide highly accurate molecular dynamics, but the computational overheads limit their use in long time-dependent simulations. In contrast, classical force fields struggle with the description of coordination bonds. Here we develop a DFT-accurate machine-learning spectral neighbor analysis potentials for two representative MOFs. Their structural and vibrational properties are then studied and tightly compared with available experimental data. Most importantly, we demonstrate an active-learning algorithm, based on mapping the relevant internal coordinates, which drastically reduces the number of training data to be computed at the DFT level. Thus, the workflow presented here appears as an efficient strategy for the study of flexible MOFs with DFT accuracy, but at a fraction of the DFT computational cost.},
  langid       = {english},
  keywords     = {/unread,AML,ML,MLP,MOF,SNAP,with-code,with-data},
  file         = {/Users/wasmer/Nextcloud/Zotero/Sharma and Sanvito - 2024 - Quantum-accurate machine learning potentials for metal-organic frameworks using temperature driven a.pdf}
}
@online{shenRepresentationindependentElectronicCharge2021,
title = {A Representation-Independent Electronic Charge Density Database for Crystalline Materials},
author = {Shen, Jimmy-Xuan and Munro, Jason M. and Horton, Matthew K. and Huck, Patrick and Dwaraknath, Shyam and Persson, Kristin A.},
......@@ -18167,6 +18455,23 @@ Subject\_term\_id: electronic-devices;electronic-properties-and-materials;ferroe
file = {/Users/wasmer/Nextcloud/Zotero/Tyler et al_2023_Artificial Intelligence in Materials Education.pdf}
}
 
% NOTE(review): arXiv link normalized from http:// to https:// (arXiv serves HTTPS and
% redirects plain-http requests); all bibliographic data otherwise unchanged.
@online{uenoSpinMultiNetNeuralNetwork2024,
  title = {{{SpinMultiNet}}: {{Neural Network Potential Incorporating Spin Degrees}} of {{Freedom}} with {{Multi-Task Learning}}},
  shorttitle = {{{SpinMultiNet}}},
  author = {Ueno, Koki and Ohuchi, Satoru and Ichikawa, Kazuhide and Amii, Kei and Wakasugi, Kensuke},
  date = {2024-09-08},
  eprint = {2409.03253},
  eprinttype = {arXiv},
  eprintclass = {cond-mat},
  doi = {10.48550/arXiv.2409.03253},
  url = {https://arxiv.org/abs/2409.03253},
  urldate = {2024-09-23},
  abstract = {Neural Network Potentials (NNPs) have attracted significant attention as a method for accelerating density functional theory (DFT) calculations. However, conventional NNP models typically do not incorporate spin degrees of freedom, limiting their applicability to systems where spin states critically influence material properties, such as transition metal oxides. This study introduces SpinMultiNet, a novel NNP model that integrates spin degrees of freedom through multi-task learning. SpinMultiNet achieves accurate predictions without relying on correct spin values obtained from DFT calculations. Instead, it utilizes initial spin estimates as input and leverages multi-task learning to optimize the spin latent representation while maintaining both \$E(3)\$ and time-reversal equivariance. Validation on a dataset of transition metal oxides demonstrates the high predictive accuracy of SpinMultiNet. The model successfully reproduces the energy ordering of stable spin configurations originating from superexchange interactions and accurately captures the rhombohedral distortion of the rocksalt structure. These results pave the way for new possibilities in materials simulations that consider spin degrees of freedom, promising future applications in large-scale simulations of various material systems, including magnetic materials.},
  pubstate = {prepublished},
  keywords = {AFM,AML,E(3),equivariant,FM,magnetism,ML,MLP,MPNN,multitask learning,NequIP,oxides,prediction of total energy,rec-by-katsumoto,spin-dependent,SpinGNN,transition metals,TRS,VASP},
  file = {/Users/wasmer/Nextcloud/Zotero/Ueno et al. - 2024 - SpinMultiNet Neural Network Potential Incorporating Spin Degrees of Freedom with Multi-Task Learnin.pdf;/Users/wasmer/Zotero/storage/2T3H8XM6/2409.html}
}
@article{uhrinWorkflowsAiiDAEngineering2021,
title = {Workflows in {{AiiDA}}: {{Engineering}} a High-Throughput, Event-Based Engine for Robust and Modular Computational Workflows},
shorttitle = {Workflows in {{AiiDA}}},
......@@ -19026,6 +19331,20 @@ Subject\_term\_id: electronic-properties-and-materials;quantum-hall;superconduct
file = {/Users/wasmer/Nextcloud/Zotero/Waroquiers et al_2017_Statistical Analysis of Coordination Environments in Oxides.pdf}
}
 
@software{wasmerAiiDAJuTools2023,
  author       = {Wasmer, Johannes and Rüßmann, Philipp and Kovacik, Roman},
  title        = {{{AiiDA-JuTools}}},
  date         = {2023-12-25},
  version      = {v0.1.2},
  organization = {Zenodo},
  doi          = {10.5281/zenodo.10430514},
  url          = {https://zenodo.org/records/10430514},
  urldate      = {2024-10-08},
  abstract     = {Tools for simplifying daily work with the AiiDA workflow engine},
  keywords     = {/unread,aiida,computational-materials-science,computational-science,data-science,density-functional-theory,dft,forschungszentrum-juelich,high-throughput,judft,materials-science,pandas,provenance,toolkit,utility,workflow},
  file         = {/Users/wasmer/Zotero/storage/28HAV2UB/10430514.html}
}
@online{wasmerBestAtomisticMachine2023,
title = {Best of {{Atomistic Machine Learning}}},
author = {Wasmer, Johannes and Evans, Matthew and Blaiszik, Ben and Riebesell, Janosh},
......@@ -20329,6 +20648,15 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the
keywords = {/unread}
}
 
@unpublished{zellerYukawaPotentialKKRnano2024,
  author       = {Zeller, Rudolf},
  title        = {Yukawa Potential in {{KKRnano}}},
  date         = {2024},
  howpublished = {Unpublished notes},
  location     = {Forschungszentrum Jülich},
  keywords     = {/unread}
}
@book{zengQuantumInformationMeets2019,
title = {Quantum {{Information Meets Quantum Matter}}: {{From Quantum Entanglement}} to {{Topological Phases}} of {{Many-Body Systems}}},
shorttitle = {Quantum {{Information Meets Quantum Matter}}},
......@@ -20446,7 +20774,7 @@ Subject\_term\_id: computational-methods;corrosion;mathematics-and-computing;the
abstract = {Advances in artificial intelligence (AI) are fueling a new paradigm of discoveries in natural sciences. Today, AI has started to advance natural sciences by improving, accelerating, and enabling our understanding of natural phenomena at a wide range of spatial and temporal scales, giving rise to a new area of research known as AI for science (AI4Science). Being an emerging research paradigm, AI4Science is unique in that it is an enormous and highly interdisciplinary area. Thus, a unified and technical treatment of this field is needed yet challenging. This work aims to provide a technically thorough account of a subarea of AI4Science; namely, AI for quantum, atomistic, and continuum systems. These areas aim at understanding the physical world from the subatomic (wavefunctions and electron density), atomic (molecules, proteins, materials, and interactions), to macro (fluids, climate, and subsurface) scales and form an important subarea of AI4Science. A unique advantage of focusing on these areas is that they largely share a common set of challenges, thereby allowing a unified and foundational treatment. A key common challenge is how to capture physics first principles, especially symmetries, in natural systems by deep learning methods. We provide an in-depth yet intuitive account of techniques to achieve equivariance to symmetry transformations. We also discuss other common technical challenges, including explainability, out-of-distribution generalization, knowledge transfer with foundation and large language models, and uncertainty quantification. To facilitate learning and education, we provide categorized lists of resources that we found to be useful. We strive to be thorough and unified and hope this initial effort may trigger more community interests and efforts to further advance AI4Science.},
pubstate = {prepublished},
version = {2},
keywords = {/unread,ACE,AI4Science,ALIGNN,Allegro,AlphaFold,AML,benchmarking,body-order,CCSD(T),CGCNN,chemistry,Computer Science - Machine Learning,Database,DeepH,DFT,DimeNet,drug discovery,E(3),education,EGNN,equivariant,FermiNet,foundation models,G-SchNet,GemNet,generative models,GNN,graph ML,invariance,learning material,library,lists,LLM,M3GNet,MACE,magnetism,MatBench,materials discovery,materials project,MD,MD17,MEGNet,Microsoft Research,ML,ML-DFA,ML-DFT,ML-ESM,ML-FF,ML-QMBP,MLP,model comparison,model taxonomy,molecules,MPNN,NequIP,NQS,OC20,OF-DFT,open questions,out-of-distribution,PAiNN,PauliNet,PDE,PhiSNet,phonon,physics,Physics - Computational Physics,QM7,QM9,representation learning,resources list,review,review-of-AI4science,review-of-AML,review-of-ML-DFT,roadmap,SchNet,SchNOrb,SE(3),SOTA,SphereNet,spin-dependent,SSL,symmetry,uncertainty quantification,with-code,XAI},
keywords = {ACE,AI4Science,ALIGNN,Allegro,AlphaFold,AML,benchmarking,body-order,CCSD(T),CGCNN,chemistry,Computer Science - Machine Learning,Database,DeepH,DFT,DimeNet,drug discovery,E(3),education,EGNN,equivariant,FermiNet,foundation models,G-SchNet,GemNet,generative models,GNN,graph ML,invariance,learning material,library,lists,LLM,M3GNet,MACE,magnetism,MatBench,materials discovery,materials project,MD,MD17,MEGNet,Microsoft Research,ML,ML-DFA,ML-DFT,ML-ESM,ML-FF,ML-QMBP,MLP,model comparison,model taxonomy,molecules,MPNN,NequIP,NQS,OC20,OF-DFT,open questions,out-of-distribution,PAiNN,PauliNet,PDE,PhiSNet,phonon,physics,Physics - Computational Physics,QM7,QM9,representation learning,resources list,review,review-of-AI4science,review-of-AML,review-of-ML-DFT,roadmap,SchNet,SchNOrb,SE(3),SOTA,SphereNet,spin-dependent,SSL,symmetry,uncertainty quantification,with-code,XAI},
file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2023_Artificial Intelligence for Science in Quantum, Atomistic, and Continuum Systems2.pdf;/Users/wasmer/Zotero/storage/J2HWXJKJ/2307.html}
}
 
File added
File added
fig/logos/casus.science/Logo-CASUS-color.png

130 B

fig/logos/hida/HiDA_Logo_RGB_kompakt.jpg

131 B

fig/logos/tcd.ie/Trinity-College-Dublin-Logo.png

130 B

fig/logos/tcd.ie/Trinity_Main_Logo.jpg

131 B

fig/presentation-2024-09/tmp-vimp-prediction/kkr-cu-pot-column2.png

131 B

fig/presentation-2024-09/tmp-vimp-prediction/kkr-cu-pot-wrong-colorbar.png

131 B