From b297c712816645de958dc317c01064e1175c8994 Mon Sep 17 00:00:00 2001 From: johannes wasmer <johannes.wasmer@gmail.com> Date: Fri, 17 Nov 2023 08:52:44 +0100 Subject: [PATCH] update bibliography --- bib/bibliography.bib | 4539 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 4459 insertions(+), 80 deletions(-) diff --git a/bib/bibliography.bib b/bib/bibliography.bib index 1852a0a..d6ee4f5 100644 --- a/bib/bibliography.bib +++ b/bib/bibliography.bib @@ -1,7 +1,19 @@ +@online{40YearsQuantum2020, + title = {40 Years of the Quantum {{Hall}} Effect}, + date = {2020-07-31}, + url = {https://www.nature.com/collections/fdbjbijfea}, + urldate = {2023-08-13}, + abstract = {The discovery of the quantum Hall effect in 1980 marked a turning point in condensed matter physics.}, + langid = {english}, + organization = {{Nature}}, + keywords = {/unread,2D material,ARPES,Berry phase,collection,Hall effect,Hall QHE,heterostructures,Heusler,history of science,magnetic order,magnetism,popular science,quantum materials,semimetal,strongly correlated maeterials,superconductor,TMDC,topological,topological insulator,topological phase,vdW materials,Weyl semimetal}, + file = {/Users/wasmer/Zotero/storage/SJH8NYEP/fdbjbijfea.html} +} + @article{acharMachineLearningElectron2023, title = {Machine {{Learning Electron Density Prediction Using Weighted Smooth Overlap}} of {{Atomic Positions}}}, author = {Achar, Siddarth K. and Bernasconi, Leonardo and Johnson, J. Karl}, - date = {2023-01}, + date = {2023-06-13}, journaltitle = {Nanomaterials}, volume = {13}, number = {12}, @@ -14,10 +26,49 @@ abstract = {Having access to accurate electron densities in chemical systems, especially for dynamical systems involving chemical reactions, ion transport, and other charge transfer processes, is crucial for numerous applications in materials chemistry. 
Traditional methods for computationally predicting electron density data for such systems include quantum mechanical (QM) techniques, such as density functional theory. However, poor scaling of these QM methods restricts their use to relatively small system sizes and short dynamic time scales. To overcome this limitation, we have developed a deep neural network machine learning formalism, which we call deep charge density prediction (DeepCDP), for predicting charge densities by only using atomic positions for molecules and condensed phase (periodic) systems. Our method uses the weighted smooth overlap of atomic positions to fingerprint environments on a grid-point basis and map it to electron density data generated from QM simulations. We trained models for bulk systems of copper, LiF, and silicon; for a molecular system, water; and for two-dimensional charged and uncharged systems, hydroxyl-functionalized graphane, with and without an added proton. We showed that DeepCDP achieves prediction R2 values greater than 0.99 and mean squared error values on the order of 10−5e2 Å−6 for most systems. DeepCDP scales linearly with system size, is highly parallelizable, and is capable of accurately predicting the excess charge in protonated hydroxyl-functionalized graphane. We demonstrate how DeepCDP can be used to accurately track the location of charges (protons) by computing electron densities at a few selected grid points in the materials, thus significantly reducing the computational cost. We also show that our models can be transferable, allowing prediction of electron densities for systems on which it has not been trained but that contain a subset of atomic species on which it has been trained. 
Our approach can be used to develop models that span different chemical systems and train them for the study of large-scale charge transport and chemical reactions.}, issue = {12}, langid = {english}, - keywords = {AML,BLYP,charge transfer,CP2K,DeepCDP,DNN,FCNN,GGA,grid-based descriptors,linear scaling,materials,ML,ML-DFT,ML-ESM,MLP,molecules,PBE,prediction of electron density,pseudopotential,PyTorch,SOAP,transfer learning,weighted SOAP}, + keywords = {AML,BLYP,charge transfer,copper,CP2K,DeepCDP,DNN,FCNN,GGA,grid-based descriptors,library,linear scaling,materials,MD,ML,ML-DFT,ML-ESM,MLP,molecules,NN,PBE,prediction of electron density,pseudopotential,PyTorch,silicon,SOAP,transfer learning,weighted SOAP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Achar et al_2023_Machine Learning Electron Density Prediction Using Weighted Smooth Overlap of.pdf} } +@article{acinQuantumTechnologiesRoadmap2018, + title = {The quantum technologies roadmap: A {{European}} community view}, + shorttitle = {The quantum technologies roadmap}, + author = {Acín, Antonio and Bloch, Immanuel and Buhrman, Harry and Calarco, Tommaso and Eichler, Christopher and Eisert, Jens and Esteve, Daniel and Gisin, Nicolas and Glaser, Steffen J. and Jelezko, Fedor and Kuhr, Stefan and Lewenstein, Maciej and Riedel, Max F. and Schmidt, Piet O. and Thew, Rob and Wallraff, Andreas and Walmsley, Ian and Wilhelm, Frank K.}, + date = {2018-08}, + journaltitle = {New Journal of Physics}, + shortjournal = {New J.
Phys.}, + volume = {20}, + number = {8}, + pages = {080201}, + publisher = {{IOP Publishing}}, + issn = {1367-2630}, + doi = {10.1088/1367-2630/aad1ea}, + url = {https://dx.doi.org/10.1088/1367-2630/aad1ea}, + urldate = {2023-08-24}, + abstract = {Within the last two decades, quantum technologies (QT) have made tremendous progress, moving from Nobel Prize award-winning experiments on quantum physics (1997: Chu, Cohen-Tanoudji, Phillips; 2001: Cornell, Ketterle, Wieman; 2005: Hall, Hänsch-, Glauber; 2012: Haroche, Wineland) into a cross-disciplinary field of applied research. Technologies are being developed now that explicitly address individual quantum states and make use of the ‘strange’ quantum properties, such as superposition and entanglement. The field comprises four domains: quantum communication, where individual or entangled photons are used to transmit data in a provably secure way; quantum simulation, where well-controlled quantum systems are used to reproduce the behaviour of other, less accessible quantum systems; quantum computation, which employs quantum effects to dramatically speed up certain calculations, such as number factoring; and quantum sensing and metrology, where the high sensitivity of coherent quantum systems to external perturbations is exploited to enhance the performance of measurements of physical quantities. In Europe, the QT community has profited from several EC funded coordination projects, which, among other things, have coordinated the creation of a 150-page QT Roadmap (http://qurope.eu/h2020/qtflagship/roadmap2016). 
This article presents an updated summary of this roadmap.}, + langid = {english}, + keywords = {for introductions,perspective,QIT,quantum materials,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Acín et al_2018_The quantum technologies roadmap.pdf} +} + +@incollection{agostiniExactFactorizationElectron2020, + title = {Exact {{Factorization}} of the {{Electron}}–{{Nuclear Wave Function}}: {{Theory}} and {{Applications}}}, + shorttitle = {Exact {{Factorization}} of the {{Electron}}–{{Nuclear Wave Function}}}, + booktitle = {Quantum {{Chemistry}} and {{Dynamics}} of {{Excited States}}}, + author = {Agostini, Federica and Gross, E. K. U.}, + date = {2020}, + pages = {531--562}, + publisher = {{John Wiley \& Sons, Ltd}}, + doi = {10.1002/9781119417774.ch17}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/9781119417774.ch17}, + urldate = {2023-09-21}, + abstract = {In this Chapter we review the exact factorization of the electron-nuclear wave function. The molecular wave function, solution of a time-dependent Schrödinger equation, is factored into a nuclear wave function and an electronic wave function with parametric dependence on nuclear configuration. This factorization resembles the (approximate) adiabatic product of a single Born-Oppenheimer state and a time-dependent nuclear wave packet, but it introduces a fundamental difference: both terms of the product are explicitly time-dependent. Such feature introduces new concepts of time-dependent vector potential and time-dependent potential energy surface that allow for the treatment of nonadiabatic dynamics, thus of dynamics beyond the Born-Oppenheimer approximation. The theoretical framework of the exact factorization is presented, also in connection to the more standard Born-Huang (still exact) representation of the molecular wave function. A trajectory-based approach to nonadiabatic dynamics is derived from the exact factorization.
A discussion on the connection between the molecular Berry phase and the corresponding quantity arising from the exact factorization is briefly discussed.}, + isbn = {978-1-119-41777-4}, + langid = {english}, + keywords = {/unread}, + file = {/Users/wasmer/Zotero/storage/XIN3MVNJ/Agostini and Gross - 2020 - Exact Factorization of the Electron–Nuclear Wave F.pdf;/Users/wasmer/Zotero/storage/7CQVKAMY/9781119417774.html} +} + @article{aguadoMajoranaQubitsTopological2020, title = {Majorana Qubits for Topological Quantum Computing}, author = {Aguado, Ramón and Kouwenhoven, Leo P.}, @@ -138,6 +189,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Alred et al_2018_Machine learning electron density in sulfur crosslinked carbon nanotubes.pdf;/Users/wasmer/Zotero/storage/879XU9RI/S0266353817330300.html} } +@article{altlandNonstandardSymmetryClasses1997, + title = {Nonstandard Symmetry Classes in Mesoscopic Normal-Superconducting Hybrid Structures}, + author = {Altland, Alexander and Zirnbauer, Martin R.}, + date = {1997-01-01}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {55}, + number = {2}, + pages = {1142--1161}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevB.55.1142}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.55.1142}, + urldate = {2023-07-12}, + abstract = {Normal-conducting mesoscopic systems in contact with a superconductor are classified by the symmetry operations of time reversal and rotation of the electron's spin. Four symmetry classes are identified, which correspond to Cartan's symmetric spaces of type C, CI, D, and DIII. A detailed study is made of the systems where the phase shift due to Andreev reflection averages to zero along a typical semiclassical single-electron trajectory. Such systems are particularly interesting because they do not have a genuine excitation gap but support quasiparticle states close to the chemical potential. 
Disorder or dynamically generated chaos mixes the states and produces forms of universal level statistics different from Wigner-Dyson. For two of the four universality classes, the n-level correlation functions are calculated by the mapping on a free one-dimensional Fermi gas with a boundary. The remaining two classes are related to the Laguerre orthogonal and symplectic random-matrix ensembles. For a quantum dot with a normal-metal–superconducting geometry, the weak-localization correction to the conductance is calculated as a function of sticking probability and two perturbations breaking time-reversal symmetry and spin-rotation invariance. The universal conductance fluctuations are computed from a maximum-entropy S-matrix ensemble. They are larger by a factor of 2 than what is naively expected from the analogy with normal-conducting systems. This enhancement is explained by the doubling of the number of slow modes: owing to the coupling of particles and holes by the proximity to the superconductor, every cooperon and diffusion mode in the advanced-retarded channel entails a corresponding mode in the advanced-advanced (or retarded-retarded) channel. 
, This article appears in the following collection:}, + keywords = {classification,group theory,Many-body theory,physics,spin,spin-dependent,superconductor,symmetry,theory,TRS}, + file = {/Users/wasmer/Nextcloud/Zotero/Altland_Zirnbauer_1997_Nonstandard symmetry classes in mesoscopic normal-superconducting hybrid.pdf;/Users/wasmer/Zotero/storage/SPFQ9JR3/PhysRevB.55.html} +} + @article{amorosoInterplaySingleIonTwoIon2021, title = {Interplay between {{Single-Ion}} and {{Two-Ion Anisotropies}} in {{Frustrated 2D Semiconductors}} and {{Tuning}} of {{Magnetic Structures Topology}}}, author = {Amoroso, Danila and Barone, Paolo and Picozzi, Silvia}, @@ -157,6 +226,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Amoroso et al_2021_Interplay between Single-Ion and Two-Ion Anisotropies in Frustrated 2D.pdf;/Users/wasmer/Zotero/storage/FN7Y4K4H/htm.html} } +@article{anandExploitingMachineLearning2022, + title = {Exploiting {{Machine Learning}} in {{Multiscale Modelling}} of {{Materials}}}, + author = {Anand, G. and Ghosh, Swarnava and Zhang, Liwei and Anupam, Angesh and Freeman, Colin L. and Ortner, Christoph and Eisenbach, Markus and Kermode, James R.}, + date = {2022-11-28}, + journaltitle = {Journal of The Institution of Engineers (India): Series D}, + shortjournal = {J. Inst. Eng. India Ser. D}, + issn = {2250-2130}, + doi = {10.1007/s40033-022-00424-z}, + url = {https://doi.org/10.1007/s40033-022-00424-z}, + urldate = {2023-09-19}, + abstract = {Recent developments in efficient machine learning algorithms have spurred significant interest in the materials community. The inherently complex and multiscale problems in Materials Science and Engineering pose a formidable challenge. The present scenario of machine learning research in Materials Science has a clear lacunae, where efficient algorithms are being developed as a separate endeavour, while such methods are being applied as ‘black-box’ models by others. 
The present article aims to discuss pertinent issues related to the development and application of machine learning algorithms for various aspects of multiscale materials modelling. The authors~present an overview of machine learning of equivariant properties, machine learning-aided statistical mechanics, the incorporation of ab initio approaches in multiscale models of materials processing and application of machine learning in uncertainty quantification. In addition to the above, the applicability of Bayesian approach for multiscale modelling will be discussed. Critical issues related to the multiscale materials modelling are also discussed.}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Anand et al_2022_Exploiting Machine Learning in Multiscale Modelling of Materials.pdf} +} + @article{andersenOPTIMADEAPIExchanging2021, title = {{{OPTIMADE}}, an {{API}} for Exchanging Materials Data}, author = {Andersen, Casper W. and Armiento, Rickard and Blokhin, Evgeny and Conduit, Gareth J. and Dwaraknath, Shyam and Evans, Matthew L. and Fekete, Ãdám and Gopakumar, Abhijith and Gražulis, Saulius and Merkys, Andrius and Mohamed, Fawzi and Oses, Corey and Pizzi, Giovanni and Rignanese, Gian-Marco and Scheidgen, Markus and Talirz, Leopold and Toher, Cormac and Winston, Donald and Aversa, Rossella and Choudhary, Kamal and Colinet, Pauline and Curtarolo, Stefano and Di Stefano, Davide and Draxl, Claudia and Er, Suleyman and Esters, Marco and Fornari, Marco and Giantomassi, Matteo and Govoni, Marco and Hautier, Geoffroy and Hegde, Vinay and Horton, Matthew K. and Huck, Patrick and Huhs, Georg and Hummelshøj, Jens and Kariryaa, Ankit and Kozinsky, Boris and Kumbhar, Snehal and Liu, Mohan and Marzari, Nicola and Morris, Andrew J. and Mostofi, Arash and Persson, Kristin A. 
and Petretto, Guido and Purcell, Thomas and Ricci, Francesco and Rose, Frisco and Scheffler, Matthias and Speckhard, Daniel and Uhrin, Martin and Vaitkus, Antanas and Villars, Pierre and Waroquiers, David and Wolverton, Chris and Wu, Michael and Yang, Xiaoyu}, @@ -194,6 +279,44 @@ file = {/Users/wasmer/Nextcloud/Zotero/Anderson et al_2019_Cormorant.pdf;/Users/wasmer/Zotero/storage/RY359LWP/1906.html} } +@article{andraeGlobalElectricityUsage2015, + title = {On {{Global Electricity Usage}} of {{Communication Technology}}: {{Trends}} to 2030}, + shorttitle = {On {{Global Electricity Usage}} of {{Communication Technology}}}, + author = {Andrae, Anders S. G. and Edler, Tomas}, + date = {2015-06}, + journaltitle = {Challenges}, + volume = {6}, + number = {1}, + pages = {117--157}, + publisher = {{Multidisciplinary Digital Publishing Institute}}, + issn = {2078-1547}, + doi = {10.3390/challe6010117}, + url = {https://www.mdpi.com/2078-1547/6/1/117}, + urldate = {2023-08-30}, + abstract = {This work presents an estimation of the global electricity usage that can be ascribed to Communication Technology (CT) between 2010 and 2030. The scope is three scenarios for use and production of consumer devices, communication networks and data centers. Three different scenarios, best, expected, and worst, are set up, which include annual numbers of sold devices, data traffic and electricity intensities/efficiencies. The most significant trend, regardless of scenario, is that the proportion of use-stage electricity by consumer devices will decrease and will be transferred to the networks and data centers. Still, it seems like wireless access networks will not be the main driver for electricity use. The analysis shows that for the worst-case scenario, CT could use as much as 51\% of global electricity in 2030. This will happen if not enough improvement in electricity efficiency of wireless access networks and fixed access networks/data centers is possible. 
However, until 2030, globally-generated renewable electricity is likely to exceed the electricity demand of all networks and data centers. Nevertheless, the present investigation suggests, for the worst-case scenario, that CT electricity usage could contribute up to 23\% of the globally released greenhouse gas emissions in 2030.}, + issue = {1}, + langid = {english}, + keywords = {ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,ICT sector,low-power electronics,world energy consumption}, + file = {/Users/wasmer/Nextcloud/Zotero/Andrae_Edler_2015_On Global Electricity Usage of Communication Technology.pdf} +} + +@article{andraeHypothesesPrimaryEnergy2020, + title = {Hypotheses for {{Primary Energy Use}}, {{Electricity Use}} and {{CO2 Emissions}} of {{Global Computing}} and {{Its Shares}} of the {{Total Between}} 2020 and 2030}, + author = {Andrae, Anders S. G.}, + date = {2020}, + journaltitle = {WSEAS Transactions on Power Systems}, + volume = {15}, + pages = {50--59}, + publisher = {{WSEAS}}, + doi = {10.37394/232016.2020.15.6}, + url = {https://www.wseas.com/journals/articles.php?id=1152}, + urldate = {2023-08-30}, + abstract = {There is no doubt that the economic and computing activity related to the digital sector will ramp up faster in the present decade than in the last. Moreover, computing infrastructure is one of three major drivers of new electricity use alongside future and current hydrogen production and battery electric vehicles charging. Here is proposed a trajectory in this decade for CO2 emissions associated with this digitalization and its share of electricity and energy generation as a whole. The roadmap for major sources of primary energy and electricity and associated CO2 emissions are projected and connected to the probable power use of the digital industry.
The truncation error for manufacturing related CO2 emissions may be 0.8 Gt or more indicating a larger share of manufacturing and absolute digital CO2 emissions.While remaining at a moderate share of global CO2 emissions (4-5\%), the resulting digital CO2 emissions will likely rise from 2020 to 2030. The opposite may only happen if the electricity used to run especially data centers and production plants is produced locally (next to the data centers and plants) from renewable sources and data intensity metrics grow slower than expected.}, + langid = {english}, + keywords = {/unread,ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,ICT sector,low-power electronics,world energy consumption}, + file = {/Users/wasmer/Nextcloud/Zotero/Andrae_2020_Hypotheses for Primary Energy Use, Electricity Use and CΟ2 Emissions of Global.pdf} +} + @article{andrejevicMachineLearningSpectralIndicators2022, title = {Machine-{{Learning Spectral Indicators}} of {{Topology}}}, author = {Andrejevic, Nina and Andrejevic, Jovana and Bernevig, B. Andrei and Regnault, Nicolas and Han, Fei and Fabbris, Gilberto and Nguyen, Thanh and Drucker, Nathan C. and Rycroft, Chris H. and Li, Mingda}, @@ -228,6 +351,45 @@ file = {/Users/wasmer/Nextcloud/Zotero/Angelopoulos et al_2023_Prediction-Powered Inference.pdf;/Users/wasmer/Zotero/storage/VUQUZZ32/2301.html} } +@article{annevelinkAutoMatAutomatedMaterials2022, + title = {{{AutoMat}}: {{Automated}} Materials Discovery for Electrochemical Systems}, + shorttitle = {{{AutoMat}}}, + author = {Annevelink, Emil and Kurchin, Rachel and Muckley, Eric and Kavalsky, Lance and Hegde, Vinay I. 
and Sulzer, Valentin and Zhu, Shang and Pu, Jiankun and Farina, David and Johnson, Matthew and Gandhi, Dhairya and Dave, Adarsh and Lin, Hongyi and Edelman, Alan and Ramsundar, Bharath and Saal, James and Rackauckas, Christopher and Shah, Viral and Meredig, Bryce and Viswanathan, Venkatasubramanian}, + date = {2022-10-01}, + journaltitle = {MRS Bulletin}, + shortjournal = {MRS Bulletin}, + volume = {47}, + number = {10}, + pages = {1036--1044}, + issn = {1938-1425}, + doi = {10.1557/s43577-022-00424-0}, + url = {https://doi.org/10.1557/s43577-022-00424-0}, + urldate = {2023-08-19}, + abstract = {Large-scale electrification is vital to addressing the climate crisis, but several scientific and technological challenges remain to fully electrify both the chemical industry and transportation. In both of these areas, new electrochemical materials will be critical, but their development currently relies heavily on human-time-intensive experimental trial and error and computationally expensive first-principles, mesoscale, and continuum simulations. We present an automated workflow, AutoMat,~which accelerates these computational steps by introducing both automated input generation and management of simulations across scales from first principles to continuum device modeling. Furthermore, we show how to seamlessly integrate multi-fidelity predictions, such as machine learning surrogates or automated robotic experiments “in-the-loop.” The automated framework is implemented with design space search techniques to dramatically accelerate the overall materials discovery pipeline by implicitly learning design features that optimize device performance across several metrics.
We discuss the benefits of AutoMat using examples in electrocatalysis and energy storage and highlight lessons learned.}, + langid = {english}, + keywords = {active learning,AML,chemical reaction,Citrine Informatics,closed-loop,electrochemistry,industrial application,materials discovery,ML,robotics,self-driving lab,surrogate model,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Annevelink et al_2022_AutoMat.pdf} +} + +@article{anstineGenerativeModelsEmerging2023, + title = {Generative {{Models}} as an {{Emerging Paradigm}} in the {{Chemical Sciences}}}, + author = {Anstine, Dylan M. and Isayev, Olexandr}, + date = {2023-04-26}, + journaltitle = {Journal of the American Chemical Society}, + shortjournal = {J. Am. Chem. Soc.}, + volume = {145}, + number = {16}, + pages = {8736--8750}, + publisher = {{American Chemical Society}}, + issn = {0002-7863}, + doi = {10.1021/jacs.2c13467}, + url = {https://doi.org/10.1021/jacs.2c13467}, + urldate = {2023-08-22}, + abstract = {Traditional computational approaches to design chemical species are limited by the need to compute properties for a vast number of candidates, e.g., by discriminative modeling. Therefore, inverse design methods aim to start from the desired property and optimize a corresponding chemical structure. From a machine learning viewpoint, the inverse design problem can be addressed through so-called generative modeling. Mathematically, discriminative models are defined by learning the probability distribution function of properties given the molecular or material structure. In contrast, a generative model seeks to exploit the joint probability of a chemical species with target characteristics. The overarching idea of generative modeling is to implement a system that produces novel compounds that are expected to have a desired set of chemical features, effectively sidestepping issues found in the forward design process. 
In this contribution, we overview and critically analyze popular generative algorithms like generative adversarial networks, variational autoencoders, flow, and diffusion models. We highlight key differences between each of the models, provide insights into recent success stories, and discuss outstanding challenges for realizing generative modeling discovered solutions in chemical applications.}, + keywords = {AML,chemistry,generative models,ML,organic chemistry,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/EEZZ6NRF/Anstine and Isayev - 2023 - Generative Models as an Emerging Paradigm in the C.pdf} +} + @unpublished{antogninisilvaMaterialsQuantumComputing2023, title = {Materials for Quantum Computing : {{Magnetic}} Impurities Embedded in Superconductors from First Principles}, author = {Antognini Silva, David and Rüßmann, Philipp and Blügel, Stefan}, @@ -241,6 +403,83 @@ file = {/Users/wasmer/Zotero/storage/83GMAQZZ/AbstractList.html} } +@online{antunesCrystalStructureGeneration2023, + title = {Crystal {{Structure Generation}} with {{Autoregressive Large Language Modeling}}}, + author = {Antunes, Luis M. and Butler, Keith T. and Grau-Crespo, Ricardo}, + date = {2023-07-10}, + eprint = {2307.04340}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2307.04340}, + url = {http://arxiv.org/abs/2307.04340}, + urldate = {2023-07-12}, + abstract = {The generation of plausible crystal structures is often an important step in the computational prediction of crystal structures from composition. Here, we introduce a methodology for crystal structure generation involving autoregressive large language modeling of the Crystallographic Information File (CIF) format. Our model, CrystaLLM, is trained on a comprehensive dataset of millions of CIF files, and is capable of reliably generating correct CIF syntax and plausible crystal structures for many classes of inorganic compounds. 
Moreover, we provide general and open access to the model by deploying it as a web application, available to anyone over the internet. Our results indicate that the model promises to be a reliable and efficient tool for both crystallography and materials informatics.}, + pubstate = {preprint}, + keywords = {AML,autoregressive,CIF,crystal structure,LLM,materials,ML,NOMAD,OQMD,PBE,prediction of structure,pretrained models,transformer,VASP,with-code,with-demo}, + file = {/Users/wasmer/Nextcloud/Zotero/Antunes et al_2023_Crystal Structure Generation with Autoregressive Large Language Modeling.pdf;/Users/wasmer/Zotero/storage/VMYV5IJR/2307.html} +} + +@article{antunesDistributedRepresentationsAtoms2022, + title = {Distributed Representations of Atoms and Materials for Machine Learning}, + author = {Antunes, Luis M. and Grau-Crespo, Ricardo and Butler, Keith T.}, + date = {2022-03-18}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {8}, + number = {1}, + pages = {1--9}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-022-00729-3}, + url = {https://www.nature.com/articles/s41524-022-00729-3}, + urldate = {2023-07-12}, + abstract = {The use of machine learning is becoming increasingly common in computational materials science. To build effective models of the chemistry of materials, useful machine-based representations of atoms and their compounds are required. We derive distributed representations of compounds from their chemical formulas only, via pooling operations of distributed representations of atoms. These compound representations are evaluated on ten different tasks, such as the prediction of formation energy and band gap, and are found to be competitive with existing benchmarks that make use of structure, and even superior in cases where only composition is available. 
Finally, we introduce an approach for learning distributed representations of atoms, named SkipAtom, which makes use of the growing information in materials structure databases.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,compositional descriptors,descriptors,embedding,library,ML,unsupervised learning,with-code,Word2Vec}, + file = {/Users/wasmer/Zotero/storage/ZEAJ22J7/Antunes et al. - 2022 - Distributed representations of atoms and materials.pdf} +} + +@inproceedings{APSAPSMarch, + title = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{MuST}}: {{An}} Integrated Ab Initio Framework for the Study of Disordered Structures}, + shorttitle = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{MuST}}}, + booktitle = {Bulletin of the {{American Physical Society}}}, + volume = {Volume 65, Number 1}, + publisher = {{American Physical Society}}, + url = {https://meetings.aps.org/Meeting/MAR20/Session/L43.3}, + urldate = {2023-09-19}, + eventtitle = {{{APS March Meeting}} 2020}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/IZRLCKN8/L43.html} +} + +@inproceedings{APSAPSMarch2020, + title = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{From LSMS}} to {{MuST}}: {{Large}} Scale First Principles Materials Calculations at the Exascale}, + shorttitle = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{From LSMS}} to {{MuST}}}, + booktitle = {Bulletin of the {{American Physical Society}}}, + date = {2020-03}, + volume = {Volume 65, Number 1}, + publisher = {{American Physical Society}}, + url = {https://meetings.aps.org/Meeting/MAR20/Session/F40.7}, + urldate = {2023-09-19}, + eventtitle = {{{APS March Meeting}} 2020}, + keywords = {/unread,KKR,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/QV67BDU6/F40.html} +} + +@inproceedings{APSAPSMarch2020a, + title = {{{APS}} -{{APS March Meeting}} 2020 - {{Event}} - {{Machine Learning}} the {{Effective Hamiltonian}} in {{High Entropy Alloys}} with {{Large DFT 
Datasets}}}, + booktitle = {Bulletin of the {{American Physical Society}}}, + date = {2020-03}, + volume = {Volume 65, Number 1}, + publisher = {{American Physical Society}}, + url = {https://meetings.aps.org/Meeting/MAR20/Session/S45.9}, + urldate = {2023-09-19}, + eventtitle = {{{APS March Meeting}} 2020}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/SJ7GXD99/S45.html} +} + @article{artrithBestPracticesMachine2021, title = {Best Practices in Machine Learning for Chemistry}, author = {Artrith, Nongnuch and Butler, Keith T. and Coudert, François-Xavier and Han, Seungwu and Isayev, Olexandr and Jain, Anubhav and Walsh, Aron}, @@ -279,6 +518,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Artrith et al_2017_Efficient and accurate machine-learning interpolation of atomic energies in.pdf;/Users/wasmer/Zotero/storage/77VRNTN7/Artrith et al. - 2017 - Efficient and accurate machine-learning interpolat.pdf;/Users/wasmer/Zotero/storage/RL7TSVEA/PhysRevB.96.html} } +@book{asbothShortCourseTopological2016, + title = {A {{Short Course}} on {{Topological Insulators}}}, + author = {Asbóth, János K. 
and Oroszlány, László and Pályi, András}, + date = {2016}, + series = {Lecture {{Notes}} in {{Physics}}}, + volume = {919}, + publisher = {{Springer International Publishing}}, + location = {{Cham}}, + doi = {10.1007/978-3-319-25607-8}, + url = {http://link.springer.com/10.1007/978-3-319-25607-8}, + urldate = {2023-10-01}, + isbn = {978-3-319-25605-4 978-3-319-25607-8}, + keywords = {/unread,educational,physics,Su-Schrieffer-Heeger model,textbook,topological,topological insulator,TRS}, + file = {/Users/wasmer/Nextcloud/Zotero/Asbóth et al_2016_A Short Course on Topological Insulators.pdf} +} + @online{AssessingDataScience2020, title = {Assessing Data Science Research via Data Science Maturity Levels: {{Patterns}}}, date = {2020-04-10}, @@ -330,6 +585,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Bac et al_2022_Topological response of the anomalous Hall effect in MnBi2Te4 due to magnetic.pdf;/Users/wasmer/Nextcloud/Zotero/Bac et al_2022_Topological response of the anomalous Hall effect in MnBi2Te4 due to magnetic2_supplementary.pdf;/Users/wasmer/Zotero/storage/E6I5UGGJ/s41535-022-00455-5.html} } +@online{bakshiLearningQuantumHamiltonians2023, + title = {Learning Quantum {{Hamiltonians}} at Any Temperature in Polynomial Time}, + author = {Bakshi, Ainesh and Liu, Allen and Moitra, Ankur and Tang, Ewin}, + date = {2023-10-03}, + eprint = {2310.02243}, + eprinttype = {arxiv}, + eprintclass = {quant-ph}, + doi = {10.48550/arXiv.2310.02243}, + url = {http://arxiv.org/abs/2310.02243}, + urldate = {2023-10-05}, + abstract = {We study the problem of learning a local quantum Hamiltonian \$H\$ given copies of its Gibbs state \$\textbackslash rho = e\^\{-\textbackslash beta H\}/\textbackslash textrm\{tr\}(e\^\{-\textbackslash beta H\})\$ at a known inverse temperature \$\textbackslash beta{$>$}0\$. 
Anshu, Arunachalam, Kuwahara, and Soleimanifar (arXiv:2004.07266) gave an algorithm to learn a Hamiltonian on \$n\$ qubits to precision \$\textbackslash epsilon\$ with only polynomially many copies of the Gibbs state, but which takes exponential time. Obtaining a computationally efficient algorithm has been a major open problem [Alhambra'22 (arXiv:2204.08349)], [Anshu, Arunachalam'22 (arXiv:2204.08349)], with prior work only resolving this in the limited cases of high temperature [Haah, Kothari, Tang'21 (arXiv:2108.04842)] or commuting terms [Anshu, Arunachalam, Kuwahara, Soleimanifar'21]. We fully resolve this problem, giving a polynomial time algorithm for learning \$H\$ to precision \$\textbackslash epsilon\$ from polynomially many copies of the Gibbs state at any constant \$\textbackslash beta {$>$} 0\$. Our main technical contribution is a new flat polynomial approximation to the exponential function, and a translation between multi-variate scalar polynomials and nested commutators. This enables us to formulate Hamiltonian learning as a polynomial system. 
We then show that solving a low-degree sum-of-squares relaxation of this polynomial system suffices to accurately learn the Hamiltonian.}, + pubstate = {preprint}, + keywords = {AML,finite-temperature,ML,ML-QM,ML-QMBP,NQS,prediction of Hamiltonian matrix}, + file = {/Users/wasmer/Nextcloud/Zotero/Bakshi et al_2023_Learning quantum Hamiltonians at any temperature in polynomial time.pdf;/Users/wasmer/Zotero/storage/BGGJUKBE/2310.html} +} + @online{balestrieroCookbookSelfSupervisedLearning2023, title = {A {{Cookbook}} of {{Self-Supervised Learning}}}, author = {Balestriero, Randall and Ibrahim, Mark and Sobal, Vlad and Morcos, Ari and Shekhar, Shashank and Goldstein, Tom and Bordes, Florian and Bardes, Adrien and Mialon, Gregoire and Tian, Yuandong and Schwarzschild, Avi and Wilson, Andrew Gordon and Geiping, Jonas and Garrido, Quentin and Fernandez, Pierre and Bar, Amir and Pirsiavash, Hamed and LeCun, Yann and Goldblum, Micah}, @@ -505,6 +776,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Basov et al_2017_Towards properties on demand in quantum materials.pdf} } +@article{basuEquiTuningGroupEquivariant2023, + title = {Equi-{{Tuning}}: {{Group Equivariant Fine-Tuning}} of {{Pretrained Models}}}, + shorttitle = {Equi-{{Tuning}}}, + author = {Basu, Sourya and Sattigeri, Prasanna and Ramamurthy, Karthikeyan Natesan and Chenthamarakshan, Vijil and Varshney, Kush R. and Varshney, Lav R. 
and Das, Payel}, + date = {2023-06-26}, + journaltitle = {Proceedings of the AAAI Conference on Artificial Intelligence}, + volume = {37}, + number = {6}, + pages = {6788--6796}, + issn = {2374-3468}, + doi = {10.1609/aaai.v37i6.25832}, + url = {https://ojs.aaai.org/index.php/AAAI/article/view/25832}, + urldate = {2023-10-08}, + abstract = {We introduce equi-tuning, a novel fine-tuning method that transforms (potentially non-equivariant) pretrained models into group equivariant models while incurring minimum L\_2 loss between the feature representations of the pretrained and the equivariant models. Large pretrained models can be equi-tuned for different groups to satisfy the needs of various downstream tasks. Equi-tuned models benefit from both group equivariance as an inductive bias and semantic priors from pretrained models. We provide applications of equi-tuning on three different tasks: image classification, compositional generalization in language, and fairness in natural language generation (NLG). We also provide a novel group-theoretic definition for fairness in NLG. The effectiveness of this definition is shown by testing it against a standard empirical method of fairness in NLG. We provide experimental results for equi-tuning using a variety of pretrained models: Alexnet, Resnet, VGG, and Densenet for image classification; RNNs, GRUs, and LSTMs for compositional generalization; and GPT2 for fairness in NLG. 
We test these models on benchmark datasets across all considered tasks to show the generality and effectiveness of the proposed method.}, + issue = {6}, + langid = {english}, + keywords = {\_tablet,/unread,benchmarking,equivariant,fine-tuning,group theory,image classification,inductive bias,natural language generation,nlp,pretrained models,symmetry}, + file = {/Users/wasmer/Nextcloud/Zotero/Basu et al_2023_Equi-Tuning.pdf} +} + @unpublished{batatiaDesignSpaceEquivariant2022, title = {The {{Design Space}} of {{E}}(3)-{{Equivariant Atom-Centered Interatomic Potentials}}}, author = {Batatia, Ilyes and Batzner, Simon and Kovács, Dávid Péter and Musaelian, Albert and Simm, Gregor N. C. and Drautz, Ralf and Ortner, Christoph and Kozinsky, Boris and Csányi, Gábor}, @@ -521,6 +812,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Batatia et al_2022_The Design Space of E(3)-Equivariant Atom-Centered Interatomic Potentials.pdf;/Users/wasmer/Zotero/storage/2FLTPTA2/2205.html} } +@online{batatiaEquivariantMatrixFunction2023, + title = {Equivariant {{Matrix Function Neural Networks}}}, + author = {Batatia, Ilyes and Schaaf, Lars L. and Chen, Huajie and Csányi, Gábor and Ortner, Christoph and Faber, Felix A.}, + date = {2023-10-16}, + eprint = {2310.10434}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics, stat}, + doi = {10.48550/arXiv.2310.10434}, + url = {http://arxiv.org/abs/2310.10434}, + urldate = {2023-11-05}, + abstract = {Graph Neural Networks (GNNs), especially message-passing neural networks (MPNNs), have emerged as powerful architectures for learning on graphs in diverse applications. However, MPNNs face challenges when modeling non-local interactions in systems such as large conjugated molecules, metals, or amorphous materials. 
Although Spectral GNNs and traditional neural networks such as recurrent neural networks and transformers mitigate these challenges, they often lack extensivity, adaptability, generalizability, computational efficiency, or fail to capture detailed structural relationships or symmetries in the data. To address these concerns, we introduce Matrix Function Neural Networks (MFNs), a novel architecture that parameterizes non-local interactions through analytic matrix equivariant functions. Employing resolvent expansions offers a straightforward implementation and the potential for linear scaling with system size. The MFN architecture achieves state-of-the-art performance in standard graph benchmarks, such as the ZINC and TU datasets, and is able to capture intricate non-local interactions in quantum systems, paving the way to new state-of-the-art force fields.}, + pubstate = {preprint}, + keywords = {ACE,AML,equivariant,linear scaling,long-range interaction,MACE,Metals and alloys,ML,ML-DFT,ML-ESM,MLP,MPNN,non-local interaction,prediction of Hamiltonian matrix,representation learning,spectral GNN,ZINC}, + file = {/Users/wasmer/Nextcloud/Zotero/Batatia et al_2023_Equivariant Matrix Function Neural Networks.pdf;/Users/wasmer/Zotero/storage/2D2JTAAK/2310.html} +} + @online{batatiaMACEHigherOrder2022, title = {{{MACE}}: {{Higher Order Equivariant Message Passing Neural Networks}} for {{Fast}} and {{Accurate Force Fields}}}, shorttitle = {{{MACE}}}, @@ -574,6 +881,46 @@ file = {/Users/wasmer/Nextcloud/Zotero/Batra et al_2019_General Atomic Neighborhood Fingerprint for Machine Learning-Based Methods.pdf;/Users/wasmer/Zotero/storage/4XDSVIN6/acs.jpcc.html} } +@article{batraPhysicsCoffeeDoughnuts2020, + title = {Physics with {{Coffee}} and {{Doughnuts}}}, + author = {Batra, Navketan and Sheet, Goutam}, + date = {2020-06-01}, + journaltitle = {Resonance}, + shortjournal = {Reson}, + volume = {25}, + number = {6}, + pages = {765--786}, + issn = {0973-712X}, + doi = 
{10.1007/s12045-020-0995-x}, + url = {https://doi.org/10.1007/s12045-020-0995-x}, + urldate = {2023-10-01}, + abstract = {Topological insulators are a new class of materials that have attracted significant attention in contemporary condensed matter physics. They are different from regular insulators, and they display novel quantum properties that involve the idea of ‘topology’, an area of mathematics. Some of the fundamental concepts behind topological insulators, particularly in low-dimensional condensed matter systems such as poly-acetylene chains, can be understood using a simple one-dimensional toy model popularly known as the Su-Schrieffer-Heeger (SSH) model. This model can also be used as an introduction to the topological insulators of higher dimensions. Here, we give a concise description of the SSH model along with a brief review of the background physics and attempt to understand the ideas of topological invariants, edge states, and bulk-boundary correspondence using the model.}, + langid = {english}, + keywords = {\_tablet,1D,Berry phase,condensed matter,educational,learning material,physics,Su-Schrieffer-Heeger model,TB,topological,topological insulator,tutorial}, + file = {/Users/wasmer/Nextcloud/Zotero/Batra_Sheet_2020_Physics with Coffee and Doughnuts.pdf} +} + +@article{batznerAdvancingMolecularSimulation2023, + title = {Advancing Molecular Simulation with Equivariant Interatomic Potentials}, + author = {Batzner, Simon and Musaelian, Albert and Kozinsky, Boris}, + date = {2023-08}, + journaltitle = {Nature Reviews Physics}, + shortjournal = {Nat Rev Phys}, + volume = {5}, + number = {8}, + pages = {437--438}, + publisher = {{Nature Publishing Group}}, + issn = {2522-5820}, + doi = {10.1038/s42254-023-00615-x}, + url = {https://www.nature.com/articles/s42254-023-00615-x}, + urldate = {2023-09-04}, + abstract = {Deep learning has the potential to accelerate atomistic simulations, but existing models suffer from a lack of robustness, sample efficiency, 
and accuracy. Simon Batzner, Albert Musaelian, and Boris Kozinsky outline how exploiting the symmetry of Euclidean space offers a new way to address these challenges.}, + issue = {8}, + langid = {english}, + keywords = {AML,biomolecules,criticism,critique of GNN,GNN,large-scale simulation,ML,MLP,MPNN,scaling,symmetry}, + file = {/Users/wasmer/Nextcloud/Zotero/Batzner et al_2023_Advancing molecular simulation with equivariant interatomic potentials.pdf} +} + @unpublished{batznerEquivariantGraphNeural2021, title = {E(3)-{{Equivariant Graph Neural Networks}} for {{Data-Efficient}} and {{Accurate Interatomic Potentials}}}, author = {Batzner, Simon and Musaelian, Albert and Sun, Lixin and Geiger, Mario and Mailoa, Jonathan P. and Kornbluth, Mordechai and Molinari, Nicola and Smidt, Tess E. and Kozinsky, Boris}, @@ -711,6 +1058,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Behler_2016_Perspective.pdf} } +@article{belotMachineLearningPredictions2023, + title = {Machine {{Learning Predictions}} of {{High-Curie-Temperature Materials}}}, + author = {Belot, Joshua F. and Taufour, Valentin and Sanvito, Stefano and Hart, Gus L. W.}, + date = {2023-07-24}, + journaltitle = {Applied Physics Letters}, + volume = {123}, + number = {4}, + eprint = {2307.06879}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + pages = {042405}, + issn = {0003-6951, 1077-3118}, + doi = {10.1063/5.0156377}, + url = {http://arxiv.org/abs/2307.06879}, + urldate = {2023-08-19}, + abstract = {Technologies that function at room temperature often require magnets with a high Curie temperature, \$T\_\textbackslash mathrm\{C\}\$, and can be improved with better materials. Discovering magnetic materials with a substantial \$T\_\textbackslash mathrm\{C\}\$ is challenging because of the large number of candidates and the cost of fabricating and testing them. 
Using the two largest known data sets of experimental Curie temperatures, we develop machine-learning models to make rapid \$T\_\textbackslash mathrm\{C\}\$ predictions solely based on the chemical composition of a material. We train a random forest model and a \$k\$-NN one and predict on an initial dataset of over 2,500 materials and then validate the model on a new dataset containing over 3,000 entries. The accuracy is compared for multiple compounds' representations ("descriptors") and regression approaches. A random forest model provides the most accurate predictions and is not improved by dimensionality reduction or by using more complex descriptors based on atomic properties. A random forest model trained on a combination of both datasets shows that cobalt-rich and iron-rich materials have the highest Curie temperatures for all binary and ternary compounds. An analysis of the model reveals systematic error that causes the model to over-predict low-\$T\_\textbackslash mathrm\{C\}\$ materials and under-predict high-\$T\_\textbackslash mathrm\{C\}\$ materials. For exhaustive searches to find new high-\$T\_\textbackslash mathrm\{C\}\$ materials, analysis of the learning rate suggests either that much more data is needed or that more efficient descriptors are necessary.}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Belot et al_2023_Machine Learning Predictions of High-Curie-Temperature Materials.pdf;/Users/wasmer/Zotero/storage/R2ZPPEBA/2307.html} +} + @article{benderEvaluationGuidelinesMachine2022, title = {Evaluation Guidelines for Machine Learning Tools in the Chemical Sciences}, author = {Bender, Andreas and Schneider, Nadine and Segler, Marwin and Patrick Walters, W. 
and Engkvist, Ola and Rodrigues, Tiago}, @@ -878,14 +1245,14 @@ type = {Video}, title = {Plenary: {{The}} Fifth Paradigm of Scientific Discovery}, shorttitle = {Plenary}, - author = {Bishop, Christopher Michael and Welling, Max and LLorens, Ashley}, + author = {Bishop, Christopher Michael and Welling, Max and Llorens, Ashley}, date = {2022-10-17}, url = {https://www.microsoft.com/en-us/research/video/plenary-the-fifth-paradigm-of-scientific-discovery/}, urldate = {2023-01-16}, abstract = {Chris Bishop, technical fellow and director of Microsoft Research AI4Science joins colleagues and collaborators across Microsoft Research to discuss how deep learning is set to have a transformational impact on the sciences – including potential applications for drug discovery and materials design. Learn more about the 2022 Microsoft Research Summit […]}, langid = {american}, organization = {{Microsoft Research Summit 2022}}, - keywords = {/unread,AML,emulator,fifth paradigm,geometric deep learning,Microsoft Research,ML,surrogate model}, + keywords = {/unread,AML,emulator,fifth paradigm,general ML,geometric deep learning,Microsoft Research,ML,surrogate model}, file = {/Users/wasmer/Zotero/storage/IJ8MX5EV/plenary-the-fifth-paradigm-of-scientific-discovery.html} } @@ -899,6 +1266,24 @@ keywords = {/unread,AML,literature analysis,ML,popular science} } +@inproceedings{blanchardComputationalWorkflowAccelerated2022, + title = {Computational {{Workflow}} for~{{Accelerated Molecular Design Using Quantum Chemical Simulations}} and~{{Deep Learning Models}}}, + booktitle = {Accelerating {{Science}} and {{Engineering Discoveries Through Integrated Research Infrastructure}} for {{Experiment}}, {{Big Data}}, {{Modeling}} and {{Simulation}}}, + author = {Blanchard, Andrew E. 
and Zhang, Pei and Bhowmik, Debsindhu and Mehta, Kshitij and Gounley, John and Reeve, Samuel Temple and Irle, Stephan and Pasini, Massimiliano Lupo},
+  editor = {Kothe, Doug and Geist, Al and Pophale, Swaroop and Liu, Hong and Parete-Koon, Suzanne},
+  date = {2022},
+  series = {Communications in {{Computer}} and {{Information Science}}},
+  pages = {3--19},
+  publisher = {{Springer Nature Switzerland}},
+  location = {{Cham}},
+  doi = {10.1007/978-3-031-23606-8_1},
+  abstract = {Efficient methods for searching the chemical space of molecular compounds are needed to automate and accelerate the design of new functional molecules such as pharmaceuticals. Given the high cost in both resources and time for experimental efforts, computational approaches play a key role in guiding the selection of promising molecules for further investigation. Here, we construct a workflow to accelerate design by combining approximate quantum chemical methods [i.e. density-functional tight-binding (DFTB)], a graph convolutional neural network (GCNN) surrogate model for chemical property prediction, and a masked language model (MLM) for molecule generation. Property data from the DFTB calculations are used to train the surrogate model; the surrogate model is used to score candidates generated by the MLM. The surrogate reduces computation time by orders of magnitude compared to the DFTB calculations, enabling an increased search of chemical space. Furthermore, the MLM generates a diverse set of chemical modifications based on pre-training from a large compound library. We utilize the workflow to search for near-infrared photoactive molecules by minimizing the predicted HOMO-LUMO gap as the target property. 
Our results show that the workflow can generate optimized molecules outside of the original training set, which suggests that iterations of the workflow could be useful for searching vast chemical spaces in a wide range of design problems.}, + isbn = {978-3-031-23606-8}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Blanchard et al_2022_Computational Workflow for Accelerated Molecular Design Using Quantum Chemical.pdf} +} + @article{blankNeuralNetworkModels1995, title = {Neural Network Models of Potential Energy Surfaces}, author = {Blank, Thomas B. and Brown, Steven D. and Calhoun, August W. and Doren, Douglas J.}, @@ -972,7 +1357,7 @@ abstract = {Condensed matter physics is currently undergoing a revolution through the introduction of concepts arising from topology that are used to characterize physical states, fields and properties from a completely different perspective. With the introduction of topology, the perspective is changed from describing complex systems in terms of local order parameters to a characterization by global quantities, which are measured nonlocally and which endow the systems with a global stability to perturbations. Prominent examples are topological insulators, skyrmions and Majorana fermions. Since topology translates into quantization, and topological order to entanglement, this ongoing revolution has impact on fields like mathematics, materials science, nanoelectronics and quantum information resulting in new device concepts enabling computations without dissipation of energy or enabling the possibility of realizing platforms for topological quantum computation, and ultimately reaching out into applications. Thus, these new exciting scientific developments and their applications are closely related to the grand challenges in information and communication technology and energy saving. 
Topology is the branch of mathematics that deals with properties of spaces that are invariant under smooth deformations. It provides newly appreciated mathematical tools in condensed matter physics that are currently revolutionizing the field of quantum matter and materials. Topology dictates that if two different Hamiltonians can be smoothly deformed into each other they give rise to many common physical properties and their states are homotopy invariant. Thus, topological invariance, which is often protected by discrete symmetries, provides some robustness that translates into the quantization of properties; such a robust quantization motivates the search and discovery of new topological matter. So far, the mainstream of modern topological condensed matter physics relies on two profoundly different scenarios: the emergence of the complex topology either in real space, as manifested e.g. in non-trivial magnetic structures or in momentum space, finding its realization in such materials as topological and Chern insulators. The latter renowned class of solids attracted considerable attention in recent years owing to its fascinating properties of spin-momentum locking, emergence of topologically protected surface/edge states governed by Dirac physics, as well as the quantization of Hall conductance and the discovery of the quantum spin Hall effect. Historically, the discovery of topological insulators gave rise to the discovery of a whole plethora of topologically non-trivial materials such asWeyl semimetals or topological superconductors, relevant in the context of the realization of Majorana fermions and topological quantum computation. 
[...]}, eventtitle = {Lecture {{Notes}} of the 48th {{IFF Spring School}} 2017}, isbn = {978-3-95806-202-3}, - keywords = {Berry phase,Chern insulator,Chern number,DFT,FZJ,Hall AHE,Hall effect,Hall QAHE,Hall QHE,Hall QSHE,Heisenberg model,IFF,IFF spring school,learning material,magnetic interactions,magnetic materials,magnetic topological materials,Majorana,MZM,PGI,PGI-1/IAS-1,PGI-9,quantum computing,review,skyrmions,spin-dependent,topological,topological insulator,tutorial}, + keywords = {\_tablet,Berry phase,Chern insulator,Chern number,DFT,FZJ,Hall AHE,Hall effect,Hall QAHE,Hall QHE,Hall QSHE,Heisenberg model,IFF,IFF spring school,learning material,magnetic interactions,magnetic materials,magnetic topological materials,Majorana,MZM,PGI,PGI-1/IAS-1,PGI-9,quantum computing,review,skyrmions,spin-dependent,topological,topological insulator,tutorial}, file = {/Users/wasmer/Nextcloud/Zotero/Blügel et al_2017_Topological Matter - Topological Insulators, Skyrmions and Majoranas.pdf} } @@ -1017,7 +1402,7 @@ abstract = {An understanding of the quantum mechanical nature of magnetism has led to the development of new magnetic materials which are used as permanent magnets, sensors, and information storage. Behind these practical applications lie a range of fundamental ideas, including symmetry breaking, order parameters, excitations, frustration, and reduced dimensionality. This superb new textbook presents a logical account of these ideas, staring from basic concepts in electromagnetsim and quantum mechanics. It outlines the origin of magnetic moments in atoms and how these moments can be affected by their local environment inside a crystal. The different types of interactions which can be present between magnetic moments are described. The final chapters of the book are devoted to the magnetic properties of metals, and to the complex behaviour which can occur when competing magnetic interactions are present and/or the system has a reduced dimensionality. 
Throughout the text, the theoretical principles are applied to real systems. There is substantial discussion of experimental techniques and current research topics.; The book is copiously illustrated and contains detailed appendices which cover the fundamental principles.}, isbn = {9780585483603 9781280375132 9780191586644 9786610375134 9780198505921}, langid = {english}, - keywords = {condensed matter,magnetism,textbook,undergraduate}, + keywords = {\_tablet,condensed matter,magnetism,textbook,undergraduate}, annotation = {OCLC: 53956469}, file = {/Users/wasmer/Nextcloud/Zotero/Blundell_2001_Magnetism in condensed matter.pdf} } @@ -1151,10 +1536,27 @@ urldate = {2023-04-14}, abstract = {AI is undergoing a paradigm shift with the rise of models (e.g., BERT, DALL-E, GPT-3) that are trained on broad data at scale and are adaptable to a wide range of downstream tasks. We call these models foundation models to underscore their critically central yet incomplete character. This report provides a thorough account of the opportunities and risks of foundation models, ranging from their capabilities (e.g., language, vision, robotics, reasoning, human interaction) and technical principles(e.g., model architectures, training procedures, data, systems, security, evaluation, theory) to their applications (e.g., law, healthcare, education) and societal impact (e.g., inequity, misuse, economic and environmental impact, legal and ethical considerations). Though foundation models are based on standard deep learning and transfer learning, their scale results in new emergent capabilities,and their effectiveness across so many tasks incentivizes homogenization. Homogenization provides powerful leverage but demands caution, as the defects of the foundation model are inherited by all the adapted models downstream. 
Despite the impending widespread deployment of foundation models, we currently lack a clear understanding of how they work, when they fail, and what they are even capable of due to their emergent properties. To tackle these questions, we believe much of the critical research on foundation models will require deep interdisciplinary collaboration commensurate with their fundamentally sociotechnical nature.}, pubstate = {preprint}, - keywords = {/unread,few-shot learning,foundation models,General ML,ML,transfer learning,transformer,zero-shot learning}, + keywords = {few-shot learning,foundation models,General ML,ML,transfer learning,transformer,zero-shot learning}, file = {/Users/wasmer/Nextcloud/Zotero/Bommasani et al_2022_On the Opportunities and Risks of Foundation Models.pdf;/Users/wasmer/Zotero/storage/72DPHWW4/2108.html} } +@online{bondesanHintonsYourNeural2021, + title = {The {{Hintons}} in Your {{Neural Network}}: A {{Quantum Field Theory View}} of {{Deep Learning}}}, + shorttitle = {The {{Hintons}} in Your {{Neural Network}}}, + author = {Bondesan, Roberto and Welling, Max}, + date = {2021-03-08}, + eprint = {2103.04913}, + eprinttype = {arxiv}, + eprintclass = {quant-ph}, + doi = {10.48550/arXiv.2103.04913}, + url = {http://arxiv.org/abs/2103.04913}, + urldate = {2023-08-22}, + abstract = {In this work we develop a quantum field theory formalism for deep learning, where input signals are encoded in Gaussian states, a generalization of Gaussian processes which encode the agent's uncertainty about the input signal. We show how to represent linear and non-linear layers as unitary quantum gates, and interpret the fundamental excitations of the quantum model as particles, dubbed ``Hintons''. On top of opening a new perspective and techniques for studying neural networks, the quantum formulation is well suited for optical quantum computing, and provides quantum deformations of neural networks that can be run efficiently on those devices. 
Finally, we discuss a semi-classical limit of the quantum deformed models which is amenable to classical simulation.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Bondesan_Welling_2021_The Hintons in your Neural Network.pdf;/Users/wasmer/Zotero/storage/E2RNIICV/2103.html} +} + @article{borchaniSurveyMultioutputRegression2015, title = {A Survey on Multi-Output Regression}, author = {Borchani, Hanen and Varando, Gherardo and Bielza, Concha and Larrañaga, Pedro}, @@ -1230,7 +1632,7 @@ abstract = {This thesis provides a theoretical description of magnetic nanostructures in inversion-asymmetric environments with strong spin-orbit interaction (SOI). The theoretical concepts introduced here can be applied in the field of spin-orbitronics, which consists ofexploiting the SOI to manipulate the electron spin without external magnetic fields. The investigated systems display a plethora of interesting phenomena ranging from chiral magnetic interactions to gapped magnetic excitations. In practice, we adopt two different approaches: First, a model-based one relying on the Rashba Hamiltonian, which is employed to demystify and understand magnetic and transport properties of magnetic nanostructures embedded in a Rashba electron gas. Second, we use a first-principles approach within the framework of the Korringa-Kohn-Rostoker (KKR) Green function method to investigate the ground state properties of magnetic impurities in topologically insulating hosts. This method is suitable to simulate nanostructures in real space. Then, we employed our newly developed code based on time-dependent density functional theory to compute the spin excitation spectra of these magnetic nanostructures embedded in topological insulators. Moreover, the KKR Green function method was used to simulate the electronic structure and ground state properties of large magnetic nanostructures, namely magnetic Skyrmions. 
In the first part, the analytical Rashba Green function and the scattering matrices modeling the magnetic impurities in the s-wave approximation are employed for the computation of the magnetic interaction tensor which contains: isotropic exchange, Dzyaloshinskii-Moriya (DM) and pseudo-dipolar interactions. The competition between these interactions leads to a rich phase diagram depending on the distance between the magnetic impurities. Next, we consider an external perturbing electric field and investigate the transport properties by computing the residual resistivity tensor within linear response theory. The contribution of SOI is explored. The investigation of arbitrary orientations of the impurity magnetic moment allowed a detailed analysis of contributions from the anisotropic magnetoresistance and planar Hall effect. Moreover, we calculate the impurity induced bound currents in the Rashba electron gas, which are used to compute the induced orbital magnetization. For a trimer of impurities with a non-vanishing spin chirality (SC) a finite orbital magnetization is observed when SOI is turned off. Since it emerges from the SC, it was named chiral orbital magnetization. [...] Bouaziz, Juba}, isbn = {9783958064294}, langid = {english}, - keywords = {Hall QHE,Hall QSHE,juKKR,KKR,PGI-1/IAS-1,skyrmions,thesis,topological insulator}, + keywords = {\_tablet,Hall QHE,Hall QSHE,juKKR,KKR,PGI-1/IAS-1,skyrmions,thesis,topological insulator}, file = {/Users/wasmer/Nextcloud/Zotero/Bouaziz_2019_Spin-orbitronics at the nanoscale.pdf;/Users/wasmer/Zotero/storage/YM28TKHA/865993.html} } @@ -1253,6 +1655,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Brack et al_2022_Ten simple rules for making a software tool workflow-ready.pdf;/Users/wasmer/Zotero/storage/QPHMATCQ/article.html} } +@online{branChemCrowAugmentingLargelanguage2023, + title = {{{ChemCrow}}: {{Augmenting}} Large-Language Models with Chemistry Tools}, + shorttitle = {{{ChemCrow}}}, + author = {Bran, Andres M. 
and Cox, Sam and Schilter, Oliver and Baldassari, Carlo and White, Andrew D. and Schwaller, Philippe}, + date = {2023-10-02}, + eprint = {2304.05376}, + eprinttype = {arxiv}, + eprintclass = {physics, stat}, + doi = {10.48550/arXiv.2304.05376}, + url = {http://arxiv.org/abs/2304.05376}, + urldate = {2023-10-08}, + abstract = {Over the last decades, excellent computational chemistry tools have been developed. Integrating them into a single platform with enhanced accessibility could help reaching their full potential by overcoming steep learning curves. Recently, large-language models (LLMs) have shown strong performance in tasks across domains, but struggle with chemistry-related problems. Moreover, these models lack access to external knowledge sources, limiting their usefulness in scientific applications. In this study, we introduce ChemCrow, an LLM chemistry agent designed to accomplish tasks across organic synthesis, drug discovery, and materials design. By integrating 18 expert-designed tools, ChemCrow augments the LLM performance in chemistry, and new capabilities emerge. Our agent autonomously planned and executed the syntheses of an insect repellent, three organocatalysts, and guided the discovery of a novel chromophore. Our evaluation, including both LLM and expert assessments, demonstrates ChemCrow's effectiveness in automating a diverse set of chemical tasks. Surprisingly, we find that GPT-4 as an evaluator cannot distinguish between clearly wrong GPT-4 completions and Chemcrow's performance. 
Our work not only aids expert chemists and lowers barriers for non-experts, but also fosters scientific advancement by bridging the gap between experimental and computational chemistry.}, + pubstate = {preprint}, + keywords = {/unread,AML,autonomous agent,chemistry,experimental,LangChain,library,LLM,ML,self-driving lab,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Bran et al_2023_ChemCrow.pdf;/Users/wasmer/Zotero/storage/FFH8F743/2304.html} +} + @unpublished{brandstetterLiePointSymmetry2022, title = {Lie {{Point Symmetry Data Augmentation}} for {{Neural PDE Solvers}}}, author = {Brandstetter, Johannes and Welling, Max and Worrall, Daniel E.}, @@ -1265,7 +1684,7 @@ url = {http://arxiv.org/abs/2202.07643}, urldate = {2022-06-09}, abstract = {Neural networks are increasingly being used to solve partial differential equations (PDEs), replacing slower numerical solvers. However, a critical issue is that neural PDE solvers require high-quality ground truth data, which usually must come from the very solvers they are designed to replace. Thus, we are presented with a proverbial chicken-and-egg problem. In this paper, we present a method, which can partially alleviate this problem, by improving neural PDE solver sample complexity -- Lie point symmetry data augmentation (LPSDA). In the context of PDEs, it turns out that we are able to quantitatively derive an exhaustive list of data transformations, based on the Lie point symmetry group of the PDEs in question, something not possible in other application areas. 
We present this framework and demonstrate how it can easily be deployed to improve neural PDE solver sample complexity by an order of magnitude.}, - keywords = {data augmentation,ML,neural PDE solver,PDE,PINN,symmetry,with-code}, + keywords = {data augmentation,general ML,ML,neural PDE solver,PDE,PINN,symmetry,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Brandstetter et al_2022_Lie Point Symmetry Data Augmentation for Neural PDE Solvers.pdf;/Users/wasmer/Zotero/storage/QUUR7MZV/2202.html} } @@ -1320,10 +1739,31 @@ urldate = {2023-05-26}, abstract = {Within the reduced basis methods approach, an effective low-dimensional subspace of a quantum many-body Hilbert space is constructed in order to investigate, e.g., the ground-state phase diagram. The basis of this subspace is built from solutions of snapshots, i.e., ground states corresponding to particular and well-chosen parameter values. Here, we show how a greedy strategy to assemble the reduced basis and thus to select the parameter points can be implemented based on matrix-product-states (MPS) calculations. Once the reduced basis has been obtained, observables required for the computation of phase diagrams can be computed with a computational complexity independent of the underlying Hilbert space for any parameter value. 
We illustrate the efficiency and accuracy of this approach for different one-dimensional quantum spin-1 models, including anisotropic as well as biquadratic exchange interactions, leading to rich quantum phase diagrams.},
   pubstate = {preprint},
-  keywords = {/unread,Condensed Matter - Strongly Correlated Electrons,Quantum Physics},
+  keywords = {/unread,todo-tagging},
   file = {/Users/wasmer/Nextcloud/Zotero/Brehmer et al_2023_Reduced basis surrogates for quantum spin systems based on tensor networks.pdf;/Users/wasmer/Zotero/storage/KVDI7XMB/2304.html}
 }
 
+@article{breuckRobustModelBenchmarking2021,
+  title = {Robust Model Benchmarking and Bias-Imbalance in Data-Driven Materials Science: A Case Study on {{MODNet}}},
+  shorttitle = {Robust Model Benchmarking and Bias-Imbalance in Data-Driven Materials Science},
+  author = {De Breuck, Pierre-Paul and Evans, Matthew L. and Rignanese, Gian-Marco},
+  date = {2021-07},
+  journaltitle = {Journal of Physics: Condensed Matter},
+  shortjournal = {J. Phys.: Condens. Matter},
+  volume = {33},
+  number = {40},
+  pages = {404002},
+  publisher = {{IOP Publishing}},
+  issn = {0953-8984},
+  doi = {10.1088/1361-648X/ac1280},
+  url = {https://dx.doi.org/10.1088/1361-648X/ac1280},
+  urldate = {2023-08-29},
+  abstract = {As the number of novel data-driven approaches to material science continues to grow, it is crucial to perform consistent quality, reliability and applicability assessments of model performance. In this paper, we benchmark the Materials Optimal Descriptor Network (MODNet) method and architecture against the recently released MatBench v0.1, a curated test suite of materials datasets. MODNet is shown to outperform current leaders on 6 of the 13 tasks, while closely matching the current leaders on a further 2 tasks; MODNet performs particularly well when the number of samples is below 10 000. Attention is paid to two topics of concern when benchmarking models. 
First, we encourage the reporting of a more diverse set of metrics as it leads to a more comprehensive and holistic comparison of model performance. Second, an equally important task is the uncertainty assessment of a model towards a target domain. Significant variations in validation errors can be observed, depending on the imbalance and bias in the training set (i.e., similarity between training and application space). By using an ensemble MODNet model, confidence intervals can be built and the uncertainty on individual predictions can be quantified. Imbalance and bias issues are often overlooked, and yet are important for successful real-world applications of machine learning in materials science and condensed matter.}, + langid = {english}, + keywords = {AML,benchmarking,compositional descriptors,data imbalance,descriptors,ensemble learning,library,MatBench,ML,model evaluation,model reporting,MODNet,property prediction,small data,todo-tagging,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Breuck et al_2021_Robust model benchmarking and bias-imbalance in data-driven materials science.pdf} +} + @article{brockherdeBypassingKohnShamEquations2017, title = {Bypassing the {{Kohn-Sham}} Equations with Machine Learning}, author = {Brockherde, Felix and Vogt, Leslie and Li, Li and Tuckerman, Mark E. 
and Burke, Kieron and Müller, Klaus-Robert}, @@ -1389,6 +1829,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Bronstein et al_2021_Geometric Deep Learning.pdf;/Users/wasmer/Zotero/storage/6ZLIPHI5/2104.html} } +@article{brumfielTopologicalInsulatorsStar2010, + title = {Topological Insulators: {{Star}} Material}, + shorttitle = {Topological Insulators}, + author = {Brumfiel, Geoff}, + date = {2010-07-01}, + journaltitle = {Nature}, + volume = {466}, + number = {7304}, + pages = {310--311}, + publisher = {{Nature Publishing Group}}, + issn = {1476-4687}, + doi = {10.1038/466310a}, + url = {https://www.nature.com/articles/466310a}, + urldate = {2023-07-12}, + abstract = {A new class of materials is poised to take condensed-matter physics by storm. Geoff Brumfiel looks at what is making topological insulators all the rage.}, + issue = {7304}, + langid = {english}, + keywords = {history of science,op-ed,physics,popular science,topological insulator}, + file = {/Users/wasmer/Nextcloud/Zotero/Brumfiel_2010_Topological insulators.pdf;/Users/wasmer/Zotero/storage/3FD5BH4T/466310a.html} +} + @article{burkeDFTNutshell2013, title = {{{DFT}} in a Nutshell}, author = {Burke, Kieron and Wagner, Lucas O.}, @@ -1444,6 +1905,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Busk et al_2021_Calibrated uncertainty for molecular property prediction using ensembles of.pdf} } +@article{butlerMachineLearningMolecular2018, + title = {Machine Learning for Molecular and Materials Science}, + author = {Butler, Keith T. and Davies, Daniel W. and Cartwright, Hugh and Isayev, Olexandr and Walsh, Aron}, + date = {2018-07}, + journaltitle = {Nature}, + volume = {559}, + number = {7715}, + pages = {547--555}, + publisher = {{Nature Publishing Group}}, + issn = {1476-4687}, + doi = {10.1038/s41586-018-0337-2}, + url = {https://www.nature.com/articles/s41586-018-0337-2}, + urldate = {2023-08-24}, + abstract = {Here we summarize recent progress in machine learning for the chemical sciences. 
We outline machine-learning techniques that are suitable for addressing research questions in this domain, as well as future directions for the field. We envisage a future in which the design, synthesis, characterization and application of molecules and materials is accelerated by artificial intelligence.}, + issue = {7715}, + langid = {english}, + keywords = {AML,collection,Database,database collection,educational,introduction,library collection,materials,materials database,ML,molecules,review,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Butler et al_2018_Machine learning for molecular and materials science.pdf} +} + @article{bystromCIDERExpressiveNonlocal2022, title = {{{CIDER}}: {{An Expressive}}, {{Nonlocal Feature Set}} for {{Machine Learning Density Functionals}} with {{Exact Constraints}}}, shorttitle = {{{CIDER}}}, @@ -1521,10 +2002,25 @@ urldate = {2023-05-26}, abstract = {Rapid access to accurate equation-of-state (EOS) data is crucial in the warm-dense matter regime, as it is employed in various applications, such as providing input for hydrodynamics codes to model inertial confinement fusion processes. In this study, we develop neural network models for predicting the EOS based on first-principles data. The first model utilizes basic physical properties, while the second model incorporates more sophisticated physical information, using output from average-atom calculations as features. Average-atom models are often noted for providing a reasonable balance of accuracy and speed; however, our comparison of average-atom models and higher-fidelity calculations shows that more accurate models are required in the warm-dense matter regime. 
Both the neural network models we propose, particularly the physics-enhanced one, demonstrate significant potential as accurate and efficient methods for computing EOS data in warm-dense matter.},
   pubstate = {preprint},
-  keywords = {/unread,Physics - Computational Physics,Physics - Plasma Physics},
+  keywords = {/unread,EOS,todo-tagging,warm dense matter},
   file = {/Users/wasmer/Nextcloud/Zotero/Callow et al_2023_Physics-enhanced neural networks for equation-of-state calculations.pdf;/Users/wasmer/Zotero/storage/W7JDGW6I/2305.html}
 }
 
+@book{cancesDensityFunctionalTheory2023,
+  title = {Density {{Functional Theory}}: {{Modeling}}, {{Mathematical Analysis}}, {{Computational Methods}}, and {{Applications}}},
+  shorttitle = {Density {{Functional Theory}}},
+  editor = {Cancès, Eric and Friesecke, Gero},
+  date = {2023},
+  publisher = {{Springer International Publishing}},
+  location = {{Cham}},
+  doi = {10.1007/978-3-031-22340-2},
+  url = {https://link.springer.com/10.1007/978-3-031-22340-2},
+  urldate = {2023-09-24},
+  isbn = {978-3-031-22339-6 978-3-031-22340-2},
+  langid = {english},
+  keywords = {/unread,APW,DFA,DFT,DFT theory,DFT-FE,educational,error estimate,LAPW,learn DFT,learning material,mathematics,numerical analysis,PAW,physics,pseudopotential,review,review-of-DFT,strongly correlated materials,textbook}
+}
+
 @article{cancesNumericalStabilityEfficiency2023,
   title = {Numerical Stability and Efficiency of Response Property Calculations in Density Functional Theory},
   author = {Cancès, Eric and Herbst, Michael F. and Kemlin, Gaspard and Levitt, Antoine and Stamm, Benjamin},
@@ -1542,7 +2038,7 @@
   url = {http://arxiv.org/abs/2210.04512},
   urldate = {2023-05-26},
   abstract = {Response calculations in density functional theory aim at computing the change in ground-state density induced by an external perturbation. 
At finite temperature these are usually performed by computing variations of orbitals, which involve the iterative solution of potentially badly-conditioned linear systems, the Sternheimer equations. Since many sets of variations of orbitals yield the same variation of density matrix this involves a choice of gauge. Taking a numerical analysis point of view we present the various gauge choices proposed in the literature in a common framework and study their stability. Beyond existing methods we propose a new approach, based on a Schur complement using extra orbitals from the self-consistent-field calculations, to improve the stability and efficiency of the iterative solution of Sternheimer equations. We show the success of this strategy on nontrivial examples of practical interest, such as Heusler transition metal alloy compounds, where savings of around 40\% in the number of required cost-determining Hamiltonian applications have been achieved.}, - keywords = {/unread,Condensed Matter - Materials Science,Mathematics - Numerical Analysis}, + keywords = {/unread,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Cancès et al_2023_Numerical stability and efficiency of response property calculations in density.pdf;/Users/wasmer/Zotero/storage/XSX6PBPG/2210.html} } @@ -1583,6 +2079,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Cao et al_2020_Artificial intelligence for high-throughput discovery of topological insulators2.pdf} } +@article{caoNeedNormalizeFailure2023, + title = {The Need to Normalize Failure}, + author = {Cao, Xiangkun Elvis and Ngetich, Gladys Chepkirui}, + date = {2023-02}, + journaltitle = {Nature Reviews Chemistry}, + shortjournal = {Nat Rev Chem}, + volume = {7}, + number = {2}, + pages = {69--70}, + publisher = {{Nature Publishing Group}}, + issn = {2397-3358}, + doi = {10.1038/s41570-022-00454-x}, + url = {https://www.nature.com/articles/s41570-022-00454-x}, + urldate = {2023-09-18}, + abstract = {Two Schmidt Science Fellows describe their 
academic and career failures as they pursued their PhDs. Suggestions are included for how advisors, departments, and institutions can support students to open up about failure, especially those from underrepresented backgrounds.}, + issue = {2}, + langid = {english}, + keywords = {academia,advice,careers,failure,graduate school,work-life balance,working in science}, + file = {/Users/wasmer/Nextcloud/Zotero/Cao_Ngetich_2023_The need to normalize failure.pdf} +} + @article{caoTestsAccuracyScalability2020, title = {Tests on the {{Accuracy}} and {{Scalability}} of the {{Full-Potential DFT Method Based}} on {{Multiple Scattering Theory}}}, author = {Cao, Peiyu and Fang, Jun and Gao, Xingyu and Tian, Fuyang and Song, Haifeng}, @@ -1617,6 +2134,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Capelle_2006_A bird's-eye view of density-functional theory.pdf;/Users/wasmer/Zotero/storage/8TLEU4M3/0211443.html} } +@online{carboneInvestigationMagneticProperties2023, + title = {Investigation of Magnetic Properties of 4f-Adatoms on Graphene}, + author = {Carbone, Johanna P. and Bouaziz, Juba and Bihlmayer, Gustav and Blügel, Stefan}, + date = {2023-09-27}, + eprint = {2309.15513}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2309.15513}, + url = {http://arxiv.org/abs/2309.15513}, + urldate = {2023-10-04}, + abstract = {Rare-earth (RE) atoms on top of 2D materials represent an interesting platform with the prospect of tailoring the magnetic anisotropy for practical applications. Here, we investigate the ground state and magnetic properties of selected \$4f\$-atoms deposited on a graphene substrate in the framework of the DFT+\$U\$ approach. The inherent strong spin-orbit interaction in conjunction with crystal field effects acting on the localized \$4f\$-shells results in a substantial magnetic anisotropy energy (tens of meVs), whose angular dependence is dictated by the \$C\_\{6v\}\$ symmetry of the graphene substrate. 
We obtain the crystal field parameters and investigate spin-flip events via quantum tunneling of magnetization in the view of achieving a protected quantum-spin behavior. Remarkably, the large spin and orbital moments of the open \$4f\$-shells (Dy, Ho and Tm) generate a strong magneto-elastic coupling which provides more flexibility to control the magnetic state via the application of external strain.}, + pubstate = {preprint}, + keywords = {2D material,adatoms,DFT,DFT+U,FLEUR,FZJ,magnetic anisotropy,magnetism,PGI,PGI-1/IAS-1,physics,quantum materials,rare earths,surface physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Carbone et al_2023_Investigation of magnetic properties of $4f$-adatoms on graphene.pdf;/Users/wasmer/Zotero/storage/RWZ9JMI5/2309.html} +} + @article{carleoMachineLearningPhysical2019, title = {Machine Learning and the Physical Sciences}, author = {Carleo, Giuseppe}, @@ -1674,7 +2207,7 @@ urldate = {2023-06-28}, abstract = {Over the past decade, fundamental changes in artificial intelligence (AI) have delivered dramatic insights across a wide breadth of U.S. Department of Energy (DOE) mission space. AI is helping to augment and improve scientific and engineering workflows in national security, the Office of Science, and DOE’s applied energy programs. The progress and potential for AI in DOE science was captured in the 2020 \hspace{0pt}“AI for Science†report. In the short interim, the scale and scope of AI have accelerated, revealing new, emergent properties that yield insights that go beyond enabling opportunities to being potentially transformative in the way that scientific problems are posed and solved. These AI advances also highlight the crucial importance of responsible development of AI, focusing on challenges relating to AI technology (e.g., explainability, validation, security and privacy), implementation (e.g., transparency, safety engineering, ethics), and application (e.g., AI-Human interactions, education, and employment impacts). 
Under the guidance of both the Office of Science (SC) and the National Nuclear Security Administration (NNSA), the DOE national laboratories organized a series of workshops in 2022 to gather input on new and rapidly emerging opportunities and challenges of scientific AI. This 2023 report is a synthesis of those workshops. The report shows how unique DOE capabilities can enable the community to drive progress in scientific use of AI, building on DOE strengths and investments in computation, data, and communications infrastructure. This report lays out a vision for DOE to leverage and expand new capabilities in AI to accelerate the progress, and deepen the quality of mission areas spanning science, energy, and security. The vision and blueprint align precisely with the pressing need for scientific grounding in areas such as bias, transparency, explainability, security, validation, and the impact of AI on jobs. While dramatic progress being made in AI by industry and defense in the U.S. and other nations, the associated objectives and incentives only partially align with DOE’s mission. This progress also reflects the migration of AI and computer science talent to industry, creating a workforce disruption that DOE must address with urgency. 
Nevertheless, DOE’s investments in exascale systems, infrastructure, software, theory, and applications—combined with unique, multidisciplinary co-design approaches scaled to thousands of experts—uniquely position the DOE complex to address the challenges of responsible AI and to extend its global leadership in science, energy, and security.}, langid = {english}, - keywords = {AI,Data management,DOE,emulator,exascale,foundation models,GPT,HPC,inverse design,large models,large science facilities,LLM,ML,MLOps,perspective,report,roadmap,RSE,scientific workflows,surrogate model,workflows,zettascale}, + keywords = {AI,AI4Science,Data management,DOE,emulator,exascale,foundation models,GPT,HPC,inverse design,large models,large science facilities,LLM,ML,MLOps,perspective,report,review,review-of-AI4science,roadmap,RSE,scientific workflows,surrogate model,workflows,zettascale}, file = {/Users/wasmer/Nextcloud/Zotero/AI for Science, Energy, and Security Report Argonne National Laboratory.pdf;/Users/wasmer/Zotero/storage/ZCJ2WQX7/ai-for-science-report.html} } @@ -1696,6 +2229,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Carvalho et al_2018_Real-space mapping of topological invariants using artificial neural networks.pdf;/Users/wasmer/Zotero/storage/ZNNT2KFN/PhysRevB.97.html} } +@online{casaresGradDFTSoftware2023, + title = {Grad {{DFT}}: A Software Library for Machine Learning Enhanced Density Functional Theory}, + shorttitle = {Grad {{DFT}}}, + author = {Casares, Pablo A. M. and Baker, Jack S. 
and Medvidovic, Matija and family=Reis, given=Roberto, prefix=dos, useprefix=false and Arrazola, Juan Miguel}, + date = {2023-09-22}, + eprint = {2309.15127}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics, physics:quant-ph}, + doi = {10.48550/arXiv.2309.15127}, + url = {http://arxiv.org/abs/2309.15127}, + urldate = {2023-10-05}, + abstract = {Density functional theory (DFT) stands as a cornerstone method in computational quantum chemistry and materials science due to its remarkable versatility and scalability. Yet, it suffers from limitations in accuracy, particularly when dealing with strongly correlated systems. To address these shortcomings, recent work has begun to explore how machine learning can expand the capabilities of DFT; an endeavor with many open questions and technical challenges. In this work, we present Grad DFT: a fully differentiable JAX-based DFT library, enabling quick prototyping and experimentation with machine learning-enhanced exchange-correlation energy functionals. Grad DFT employs a pioneering parametrization of exchange-correlation functionals constructed using a weighted sum of energy densities, where the weights are determined using neural networks. Moreover, Grad DFT encompasses a comprehensive suite of auxiliary functions, notably featuring a just-in-time compilable and fully differentiable self-consistent iterative procedure. To support training and benchmarking efforts, we additionally compile a curated dataset of experimental dissociation energies of dimers, half of which contain transition metal atoms characterized by strong electronic correlations. 
The software library is tested against experimental results to study the generalization capabilities of a neural functional across potential energy surfaces and atomic species, as well as the effect of training data noise on the resulting model accuracy.}, + pubstate = {preprint}, + keywords = {\_tablet,/unread,AML,autodiff,DM21,JAX,library,ML,ML-DFA,ML-DFT,ML-ESM,prediction of Exc,transition metals,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Casares et al_2023_Grad DFT.pdf;/Users/wasmer/Zotero/storage/EZ4L7B7D/2309.html} +} + @article{cavaIntroductionQuantumMaterials2021, title = {Introduction: {{Quantum Materials}}}, shorttitle = {Introduction}, @@ -1899,6 +2449,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Chang et al_2022_Towards overcoming data scarcity in materials science.pdf} } +@article{chanussotOpenCatalyst20202021, + title = {The {{Open Catalyst}} 2020 ({{OC20}}) {{Dataset}} and {{Community Challenges}}}, + author = {Chanussot, Lowik and Das, Abhishek and Goyal, Siddharth and Lavril, Thibaut and Shuaibi, Muhammed and Riviere, Morgane and Tran, Kevin and Heras-Domingo, Javier and Ho, Caleb and Hu, Weihua and Palizhati, Aini and Sriram, Anuroop and Wood, Brandon and Yoon, Junwoong and Parikh, Devi and Zitnick, C. Lawrence and Ulissi, Zachary}, + date = {2021-05-21}, + journaltitle = {ACS Catalysis}, + shortjournal = {ACS Catal.}, + volume = {11}, + number = {10}, + eprint = {2010.09990}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + pages = {6059--6072}, + issn = {2155-5435, 2155-5435}, + doi = {10.1021/acscatal.0c04525}, + url = {http://arxiv.org/abs/2010.09990}, + urldate = {2023-08-22}, + abstract = {Catalyst discovery and optimization is key to solving many societal and energy challenges including solar fuels synthesis, long-term energy storage, and renewable fertilizer production. 
Despite considerable effort by the catalysis community to apply machine learning models to the computational catalyst discovery process, it remains an open challenge to build models that can generalize across both elemental compositions of surfaces and adsorbate identity/configurations, perhaps because datasets have been smaller in catalysis than related fields. To address this we developed the OC20 dataset, consisting of 1,281,040 Density Functional Theory (DFT) relaxations (\textasciitilde 264,890,000 single point evaluations) across a wide swath of materials, surfaces, and adsorbates (nitrogen, carbon, and oxygen chemistries). We supplemented this dataset with randomly perturbed structures, short timescale molecular dynamics, and electronic structure analyses. The dataset comprises three central tasks indicative of day-to-day catalyst modeling and comes with pre-defined train/validation/test splits to facilitate direct comparisons with future model development efforts. We applied three state-of-the-art graph neural network models (CGCNN, SchNet, Dimenet++) to each of these tasks as baseline demonstrations for the community to build on. In almost every task, no upper limit on model size was identified, suggesting that even larger models are likely to improve on initial results. 
The dataset and baseline models are both provided as open resources, as well as a public leader board to encourage community contributions to solve these important tasks.},
+  keywords = {/unread,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Chanussot et al_2021_The Open Catalyst 2020 (OC20) Dataset and Community Challenges.pdf;/Users/wasmer/Zotero/storage/9AYRS2RD/2010.html}
+}
+
 @unpublished{chardDLHubModelData2018,
   title = {{{DLHub}}: {{Model}} and {{Data Serving}} for {{Science}}},
   shorttitle = {{{DLHub}}},
@@ -1914,6 +2485,25 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Chard et al_2018_DLHub.pdf;/Users/wasmer/Zotero/storage/VT5H6PP6/1811.html}
 }
 
+@article{chengMappingMaterialsMolecules2020,
+  title = {Mapping {{Materials}} and {{Molecules}}},
+  author = {Cheng, Bingqing and Griffiths, Ryan-Rhys and Wengert, Simon and Kunkel, Christian and Stenczel, Tamás and Zhu, Bonan and Deringer, Volker L. and Bernstein, Noam and Margraf, Johannes T. and Reuter, Karsten and Csányi, Gábor},
+  date = {2020-09-15},
+  journaltitle = {Accounts of Chemical Research},
+  shortjournal = {Acc. Chem. Res.},
+  volume = {53},
+  number = {9},
+  pages = {1981--1991},
+  publisher = {{American Chemical Society}},
+  issn = {0001-4842},
+  doi = {10.1021/acs.accounts.0c00403},
+  url = {https://doi.org/10.1021/acs.accounts.0c00403},
+  urldate = {2023-07-13},
+  abstract = {Conspectus: The visualization of data is indispensable in scientific research, from the early stages when human insight forms to the final step of communicating results. In computational physics, chemistry and materials science, it can be as simple as making a scatter plot or as straightforward as looking through the snapshots of atomic positions manually. However, as a result of the “big data” revolution, these conventional approaches are often inadequate. 
The widespread adoption of high-throughput computation for materials discovery and the associated community-wide repositories have given rise to data sets that contain an enormous number of compounds and atomic configurations. A typical data set contains thousands to millions of atomic structures, along with a diverse range of properties such as formation energies, band gaps, or bioactivities.It would thus be desirable to have a data-driven and automated framework for visualizing and analyzing such structural data sets. The key idea is to construct a low-dimensional representation of the data, which facilitates navigation, reveals underlying patterns, and helps to identify data points with unusual attributes. Such data-intensive maps, often employing machine learning methods, are appearing more and more frequently in the literature. However, to the wider community, it is not always transparent how these maps are made and how they should be interpreted. Furthermore, while these maps undoubtedly serve a decorative purpose in academic publications, it is not always apparent what extra information can be garnered from reading or making them.This Account attempts to answer such questions. We start with a concise summary of the theory of representing chemical environments, followed by the introduction of a simple yet practical conceptual approach for generating structure maps in a generic and automated manner. Such analysis and mapping is made nearly effortless by employing the newly developed software tool ASAP. To showcase the applicability to a wide variety of systems in chemistry and materials science, we provide several illustrative examples, including crystalline and amorphous materials, interfaces, and organic molecules. 
In these examples, the maps not only help to sift through large data sets but also reveal hidden patterns that could be easily missed using conventional analyses.The explosion in the amount of computed information in chemistry and materials science has made visualization into a science in itself. Not only have we benefited from exploiting these visualization methods in previous works, we also believe that the automated mapping of data sets will in turn stimulate further creativity and exploration, as well as ultimately feed back into future advances in the respective fields.}, + keywords = {AML,best practices,descriptor hyperparameters,descriptors,descriptors analysis,hyperparameters,hyperparameters optimization,library,ML,MLP,SOAP,unsupervised learning,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Cheng et al_2020_Mapping Materials and Molecules.pdf;/Users/wasmer/Nextcloud/Zotero/Cheng et al_2020_Mapping Materials and Molecules2.pdf;/Users/wasmer/Zotero/storage/J2Q79UBS/acs.accounts.html} +} + @article{chenGraphNetworksUniversal2019, title = {Graph {{Networks}} as a {{Universal Machine Learning Framework}} for {{Molecules}} and {{Crystals}}}, author = {Chen, Chi and Ye, Weike and Zuo, Yunxing and Zheng, Chen and Ong, Shyue Ping}, @@ -1974,7 +2564,7 @@ urldate = {2023-07-01}, abstract = {Machine learning (ML) models for molecules and materials commonly rely on a decomposition of the global target quantity into local, atom-centered contributions. This approach is convenient from a computational perspective, enabling large-scale ML-driven simulations with a linear-scaling cost, and also allow for the identification and post-hoc interpretation of contributions from individual chemical environments and motifs to complicated macroscopic properties. 
However, even though there exist practical justifications for these decompositions, only the global quantity is rigorously defined, and thus it is unclear to what extent the atomistic terms predicted by the model can be trusted. Here, we introduce a quantitative metric, which we call the local prediction rigidity (LPR), that allows one to assess how robust the locally decomposed predictions of ML models are. We investigate the dependence of LPR on the aspects of model training, particularly the composition of training dataset, for a range of different problems from simple toy models to real chemical systems. We present strategies to systematically enhance the LPR, which can be used to improve the robustness, interpretability, and transferability of atomistic ML models.}, pubstate = {preprint}, - keywords = {/unread,Condensed Matter - Materials Science,Physics - Chemical Physics}, + keywords = {/unread,todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Chong et al_2023_Robustness of Local Predictions in Atomistic Machine Learning Models2.pdf;/Users/wasmer/Zotero/storage/SEXVHR6B/2306.html} } @@ -2008,7 +2598,7 @@ abstract = {Lack of rigorous reproducibility and validation are major hurdles for scientific development across many fields. Materials science in particular encompasses a variety of experimental and theoretical approaches that require careful benchmarking. Leaderboard efforts have been developed previously to mitigate these issues. However, a comprehensive comparison and benchmarking on an integrated platform with multiple data modalities with both perfect and defect materials data is still lacking. This work introduces JARVIS-Leaderboard, an open-source and community-driven platform that facilitates benchmarking and enhances reproducibility. The platform allows users to set up benchmarks with custom tasks and enables contributions in the form of dataset, code, and meta-data submissions. 
We cover the following materials design categories: Artificial Intelligence (AI), Electronic Structure (ES), Force-fields (FF), Quantum Computation (QC) and Experiments (EXP). For AI, we cover several types of input data, including atomic structures, atomistic images, spectra, and text. For ES, we consider multiple ES approaches, software packages, pseudopotentials, materials, and properties, comparing results to experiment. For FF, we compare multiple approaches for material property predictions. For QC, we benchmark Hamiltonian simulations using various quantum algorithms and circuits. Finally, for experiments, we use the inter-laboratory approach to establish benchmarks. There are 1281 contributions to 274 benchmarks using 152 methods with more than 8 million data-points, and the leaderboard is continuously expanding. The JARVIS-Leaderboard is available at the website: https://pages.nist.gov/jarvis\_leaderboard}, langid = {english}, organization = {{arXiv.org}}, - keywords = {/unread}, + keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Choudhary et al_2023_Large Scale Benchmark of Materials Design Methods.pdf} } @@ -2065,6 +2655,40 @@ file = {/Users/wasmer/Nextcloud/Zotero/Christensen et al_2022_2022 roadmap on neuromorphic computing and engineering.pdf} } +@online{cignoniElectronicExcitedStates2023, + title = {Electronic Excited States from Physically-Constrained Machine Learning}, + author = {Cignoni, Edoardo and Suman, Divya and Nigam, Jigyasa and Cupellini, Lorenzo and Mennucci, Benedetta and Ceriotti, Michele}, + date = {2023-11-01}, + eprint = {2311.00844}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2311.00844}, + url = {http://arxiv.org/abs/2311.00844}, + urldate = {2023-11-05}, + abstract = {Data-driven techniques are increasingly used to replace electronic-structure calculations of matter. 
In this context, a relevant question is whether machine learning (ML) should be applied directly to predict the desired properties or be combined explicitly with physically-grounded operations. We present an example of an integrated modeling approach, in which a symmetry-adapted ML model of an effective Hamiltonian is trained to reproduce electronic excitations from a quantum-mechanical calculation. The resulting model can make predictions for molecules that are much larger and more complex than those that it is trained on, and allows for dramatic computational savings by indirectly targeting the outputs of well-converged calculations while using a parameterization corresponding to a minimal atom-centered basis. These results emphasize the merits of intertwining data-driven techniques with physical approximations, improving the transferability and interpretability of ML models without affecting their accuracy and computational efficiency, and providing a blueprint for developing ML-augmented electronic-structure methods.}, + pubstate = {preprint}, + keywords = {AML,B3LYP,basis set,emulator,excited states,hybrid AI/simulation,library,ML,ML-DFT,ML-ESM,ML-WFT,molecular orbitals,molecules,prediction of charge transfer,prediction of Hamiltonian matrix,prediction of orbital energies,STO-3G,transfer learning,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Cignoni et al_2023_Electronic excited states from physically-constrained machine learning.pdf;/Users/wasmer/Zotero/storage/XWXG8UVG/2311.html} +} + +@book{citroSketchesPhysicsCelebration2023, + title = {Sketches of {{Physics}}: {{The Celebration Collection}}}, + shorttitle = {Sketches of {{Physics}}}, + editor = {Citro, Roberta and Lewenstein, Maciej and Rubio, Angel and Schleich, Wolfgang P. and Wells, James D. 
and Zank, Gary P.}, + date = {2023}, + series = {Lecture {{Notes}} in {{Physics}}}, + volume = {1000}, + publisher = {{Springer International Publishing}}, + location = {{Cham}}, + doi = {10.1007/978-3-031-32469-7}, + url = {https://link.springer.com/10.1007/978-3-031-32469-7}, + urldate = {2023-10-06}, + isbn = {978-3-031-32468-0 978-3-031-32469-7}, + langid = {english}, + keywords = {2D material,educational,for introductions,learning material,physics,popular science,quantum computing,quantum materials,quantum simulator,RWTH,topological,topological insulator,Topological matter,twisted bilayer,twisted bilayer graphene}, + file = {/Users/wasmer/Nextcloud/Zotero/Citro et al_2023_Sketches of Physics.pdf} +} + @article{clementBenchmarkAFLOWData2020, title = {Benchmark {{AFLOW Data Sets}} for {{Machine Learning}}}, author = {Clement, Conrad L. and Kauwe, Steven K. and Sparks, Taylor D.}, @@ -2184,10 +2808,44 @@ url = {https://doi.org/10.1021/acs.jcim.1c00227}, urldate = {2022-07-10}, abstract = {Machine learning milestones in computational chemistry are overshadowed by their unaccountability and the overwhelming zoo of tools for each specific task. A promising path to tackle these problems is using machine learning to reproduce physical magnitudes as a basis to derive many other properties. By using a model of the electron density consisting of an analytical expansion on a linear set of isotropic and anisotropic functions, we implemented in this work a message-passing neural network able to reproduce electron density in molecules with just a 2.5\% absolute error in complex cases. We also adapted our methodology to describe electron density in large biomolecules (proteins) and to obtain atomic charges, interaction energies, and DFT energies. 
file = {/Users/wasmer/Nextcloud/Zotero/Cuevas-Zuviría_Pacios_2021_Machine Learning of Analytical Electron Density in Large Molecules Through.pdf}
APET utilizes atomic periodical positions as its positional embedding, which incorporates all of the structural information in a crystal, providing a more complete and accurate representation. Furthermore, the interpretability of APET enables us to discover the underlying physical properties of materials with greater precision and accuracy.}, + keywords = {/unread,AML,APET,attention,library,Mat2Spec,materials,ML,ML-DFT,ML-ESM,prediction of DOS,transformer,with-code}, + file = {/Users/wasmer/Zotero/storage/8XQZZLRL/acs.jpclett.html} +} + +@online{curcicParallelFortranFramework2019, + title = {A Parallel {{Fortran}} Framework for Neural Networks and Deep Learning}, + author = {Curcic, Milan}, + date = {2019-03-25}, + eprint = {1902.06714}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.1902.06714}, + url = {http://arxiv.org/abs/1902.06714}, + urldate = {2023-09-30}, + abstract = {This paper describes neural-fortran, a parallel Fortran framework for neural networks and deep learning. It features a simple interface to construct feed-forward neural networks of arbitrary structure and size, several activation functions, and stochastic gradient descent as the default optimization algorithm. Neural-fortran also leverages the Fortran 2018 standard collective subroutines to achieve data-based parallelism on shared- or distributed-memory machines. First, I describe the implementation of neural networks with Fortran derived types, whole-array arithmetic, and collective sum and broadcast operations to achieve parallelism. Second, I demonstrate the use of neural-fortran in an example of recognizing hand-written digits from images. Finally, I evaluate the computational performance in both serial and parallel modes. 
Ease of use and computational performance are similar to an existing popular machine learning framework, making neural-fortran a viable candidate for further development and use in production.}, + pubstate = {preprint}, + keywords = {/unread,alternative approaches,Deep learning,Fortran,General ML,HPC,library,ML,NN,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Curcic_2019_A parallel Fortran framework for neural networks and deep learning.pdf;/Users/wasmer/Zotero/storage/PINUPP44/1902.html} +} + @article{curtaroloAFLOWAutomaticFramework2012, title = {{{AFLOW}}: {{An}} Automatic Framework for High-Throughput Materials Discovery}, shorttitle = {{{AFLOW}}}, @@ -2278,7 +2936,7 @@ urldate = {2023-03-19}, abstract = {High-throughput data generation methods and machine learning (ML) algorithms have given rise to a new era of computational materials science by learning relationships among composition, structure, and properties and by exploiting such relations for design. However, to build these connections, materials data must be translated into a numerical form, called a representation, that can be processed by a machine learning model. Datasets in materials science vary in format (ranging from images to spectra), size, and fidelity. Predictive models vary in scope and property of interests. Here, we review context-dependent strategies for constructing representations that enable the use of materials as inputs or outputs of machine learning models. Furthermore, we discuss how modern ML techniques can learn representations from data and transfer chemical and physical information between tasks. 
Finally, we outline high-impact questions that have not been fully resolved and thus, require further investigation.}, pubstate = {preprint}, - keywords = {\_tablet,/unread,AML,defects,descriptors,disordered,materials,ML,review,review-of-descriptors,TODO}, + keywords = {\_tablet,AML,defects,descriptors,disordered,materials,ML,review,review-of-descriptors,TODO}, file = {/Users/wasmer/Nextcloud/Zotero/Damewood et al_2023_Representations of Materials for Machine Learning.pdf;/Users/wasmer/Zotero/storage/7JZ95596/2301.html} } @@ -2330,7 +2988,7 @@ abstract = {Many atomic descriptors are currently limited by their unfavourable scaling with the number of chemical elements S e.g. the length of body-ordered descriptors, such as the SOAP power spectrum (3-body) and the (ACE) (multiple body-orders), scales as (NS)ν where ν\,+\,1 is the body-order and N is the number of radial basis functions used in the density expansion. We introduce two distinct approaches which can be used to overcome this scaling for the SOAP power spectrum. Firstly, we show that the power spectrum is amenable to lossless compression with respect to both S and N, so that the descriptor length can be reduced from \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}(\{N\}\^\{2\}\{S\}\^\{2\})\$\$to \$\$\{\{\{\textbackslash mathcal\{O\}\}\}\}\textbackslash left(NS\textbackslash right)\$\$. Secondly, we introduce a generalised SOAP kernel, where compression is achieved through the use of the total, element agnostic density, in combination with radial projection. 
The ideas used in the generalised kernel are equally applicable to any other body-ordered descriptors and we demonstrate this for the (ACSF).
abstract = {Payel Das is at the AI Science Department, IBM. The Applied Machine Learning Days channel features talks and performances from the Applied Machine Learning Days held at the EPFL. 
author = {Dawid, Anna and Arnold, Julian and Requena, Borja and Gresch, Alexander and Płodzień, Marcin and Donatella, Kaelan and Nicoli, Kim A. and Stornati, Paolo and Koch, Rouven and Büttner, Miriam and Okuła, Robert and Muñoz-Gil, Gorka and Vargas-Hernández, Rodrigo A. and Cervera-Lierta, Alba and Carrasquilla, Juan and Dunjko, Vedran and Gabrié, Marylou and Huembeli, Patrick and family=Nieuwenburg, given=Evert, prefix=van, useprefix=true and Vicentini, Filippo and Wang, Lei and Wetzel, Sebastian J. and Carleo, Giuseppe and Greplová, Eliška and Krems, Roman and Marquardt, Florian and Tomza, Michał and Lewenstein, Maciej and Dauphin, Alexandre},
We cover the use of deep learning and kernel methods in supervised, unsupervised, and reinforcement learning algorithms for phase classification, representation of many-body quantum states, quantum feedback control, and quantum circuits optimization. Moreover, we introduce and discuss more specialized topics such as differentiable programming, generative models, statistical approach to machine learning, and quantum machine learning.}, + langid = {english}, + keywords = {condensed matter,educational,equivariant,general ML,Ising,learning material,lecture notes,magnetic structure,ML,ML-WFT,NQS,quantum science,spin,spin symmetry,spin texture,symmetrization,symmetry,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Dawid et al_2022_Modern applications of machine learning in quantum sciences3.pdf} +} + +@software{dawidTutorialsPreparedSummer2022, + title = {Tutorials Prepared for the "{{Summer School}}: {{Machine Learning}} in {{Quantum Physics}} and {{Chemistry}}"}, + shorttitle = {Tutorials Prepared for the "{{Summer School}}}, + author = {Dawid, Anna}, + date = {2022-06-21}, + origdate = {2021-08-19T16:11:26Z}, + url = {https://github.com/Shmoo137/SummerSchool2021_MLinQuantum}, + urldate = {2023-08-25}, + abstract = {The repository contains Jupyter notebooks for hands-on tutorials organized within the Summer School: Machine Learning for Quantum Physics and Chemistry (24th August - 3rd September 2021, Warsaw).}, + keywords = {/unread} +} + +@article{debreuckMaterialsPropertyPrediction2021, + title = {Materials Property Prediction for Limited Datasets Enabled by Feature Selection and Joint Learning with {{MODNet}}}, + author = {De Breuck, Pierre-Paul and Hautier, Geoffroy and Rignanese, Gian-Marco}, + date = {2021-06-03}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {7}, + number = {1}, + pages = {1--8}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-021-00552-2}, + url = 
{https://www.nature.com/articles/s41524-021-00552-2}, + urldate = {2023-08-29}, + abstract = {In order to make accurate predictions of material properties, current machine-learning approaches generally require large amounts of data, which are often not available in practice. In this work, MODNet, an all-round framework, is presented which relies on a feedforward neural network, the selection of physically meaningful features, and when applicable, joint-learning. Next to being faster in terms of training time, this approach is shown to outperform current graph-network models on small datasets. In particular, the vibrational entropy at 305\,K of crystals is predicted with a mean absolute test error of 0.009\,meV/K/atom (four times lower than previous studies). Furthermore, joint learning reduces the test error compared to single-target learning and enables the prediction of multiple properties at once, such as temperature functions. Finally, the selection algorithm highlights the most important features and thus helps to understand the underlying physics.}, + issue = {1}, + langid = {english}, + keywords = {AML,compositional descriptors,descriptors,feature importance,library,MEGNet,ML,MODNet,NN,original publication,property prediction,small data,todo-tagging,transfer learning}, + file = {/Users/wasmer/Nextcloud/Zotero/De Breuck et al_2021_Materials property prediction for limited datasets enabled by feature selection.pdf} +} + @article{deComparingMoleculesSolids2016, title = {Comparing Molecules and Solids across Structural and Alchemical Space}, author = {De, Sandip and Bartók, Albert P. 
and Csányi, Gábor and Ceriotti, Michele}, @@ -2431,6 +3164,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Degrave et al_2022_Magnetic control of tokamak plasmas through deep reinforcement learning.pdf;/Users/wasmer/Zotero/storage/U6PRS6KM/s41586-021-04301-9.html} } +@article{delrioDeepLearningFramework2023, + title = {A Deep Learning Framework to Emulate Density Functional Theory}, + author = {family=Rio, given=Beatriz G., prefix=del, useprefix=true and Phan, Brandon and Ramprasad, Rampi}, + date = {2023-08-29}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--9}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01115-3}, + url = {https://www.nature.com/articles/s41524-023-01115-3}, + urldate = {2023-09-22}, + abstract = {Density functional theory (DFT) has been a critical component of computational materials research and discovery for decades. However, the computational cost of solving the central Kohn–Sham equation remains a major obstacle for dynamical studies of complex phenomena at-scale. Here, we propose an end-to-end machine learning (ML) model that emulates the essence of DFT by mapping the atomic structure of the system to its electronic charge density, followed by the prediction of other properties such as density of states, potential energy, atomic forces, and stress tensor, by using the atomic structure and charge density as input. Our deep learning model successfully bypasses the explicit solution of the Kohn-Sham equation with orders of magnitude speedup (linear scaling with system size with a small prefactor), while maintaining chemical accuracy. 
We demonstrate the capability of this ML-DFT concept for an extensive database of organic molecules, polymer chains, and polymer crystals.}, + issue = {1}, + langid = {english}, + keywords = {\_tablet,ACDC,AGNI desriptor,AML,chemistry,descriptors,emulator,grid-based descriptors,invariance,library,linear scaling,materials,ML,ML-DFT,ML-ESM,molecules,multi-step model,NN,organic chemistry,PBE,prediction of DOS,prediction of electron density,TensorFlow,tensorial target,VASP,with-code,with-data}, + file = {/Users/wasmer/Zotero/storage/EHN4XYXG/del Rio et al_2023_A deep learning framework to emulate density functional theory.pdf} +} + @article{delrioEfficientDeepLearning2020, title = {An {{Efficient Deep Learning Scheme To Predict}} the {{Electronic Structure}} of {{Materials}} and {{Molecules}}: {{The Example}} of {{Graphene-Derived Allotropes}}}, shorttitle = {An {{Efficient Deep Learning Scheme To Predict}} the {{Electronic Structure}} of {{Materials}} and {{Molecules}}}, @@ -2451,6 +3205,44 @@ file = {/Users/wasmer/Nextcloud/Zotero/del Rio et al_2020_An Efficient Deep Learning Scheme To Predict the Electronic Structure of.pdf;/Users/wasmer/Zotero/storage/46EGTQLS/acs.jpca.html} } +@online{dengCHGNetPretrainedUniversal2023, + title = {{{CHGNet}}: {{Pretrained}} Universal Neural Network Potential for Charge-Informed Atomistic Modeling}, + shorttitle = {{{CHGNet}}}, + author = {Deng, Bowen and Zhong, Peichen and Jun, KyuJung and Riebesell, Janosh and Han, Kevin and Bartel, Christopher J. and Ceder, Gerbrand}, + date = {2023-06-20}, + eprint = {2302.14231}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2302.14231}, + url = {http://arxiv.org/abs/2302.14231}, + urldate = {2023-08-23}, + abstract = {The simulation of large-scale systems with complex electron interactions remains one of the greatest challenges for the atomistic modeling of materials. 
Although classical force fields often fail to describe the coupling between electronic states and ionic rearrangements, the more accurate \textbackslash textit\{ab-initio\} molecular dynamics suffers from computational complexity that prevents long-time and large-scale simulations, which are essential to study many technologically relevant phenomena, such as reactions, ion migrations, phase transformations, and degradation. In this work, we present the Crystal Hamiltonian Graph neural Network (CHGNet) as a novel machine-learning interatomic potential (MLIP), using a graph-neural-network-based force field to model a universal potential energy surface. CHGNet is pretrained on the energies, forces, stresses, and magnetic moments from the Materials Project Trajectory Dataset, which consists of over 10 years of density functional theory static and relaxation trajectories of \$\textbackslash sim 1.5\$ million inorganic structures. The explicit inclusion of magnetic moments enables CHGNet to learn and accurately represent the orbital occupancy of electrons, enhancing its capability to describe both atomic and electronic degrees of freedom. We demonstrate several applications of CHGNet in solid-state materials, including charge-informed molecular dynamics in Li\$\_x\$MnO\$\_2\$, the finite temperature phase diagram for Li\$\_x\$FePO\$\_4\$ and Li diffusion in garnet conductors. 
We critically analyze the significance of including charge information for capturing appropriate chemistry, and we provide new insights into ionic systems with additional electronic degrees of freedom that can not be observed by previous MLIPs.}, + pubstate = {preprint}, + keywords = {AML,ase,charge transfer,Database,DFT,electrochemistry,electrostatic interaction,GGA,GGA+U,ionic systems,LAMMPS,library,magnetic moment,magnetism,materials,materials project,ML,MLP,MPNN,periodic table,prediction of energy,prediction of forces,prediction of magnetic moment,prediction of stress,structure relaxation,transition metals,universal potential,VASP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Deng et al_2023_CHGNet.pdf;/Users/wasmer/Zotero/storage/IWKNTQHT/2302.html} +} + +@article{dengCHGNetPretrainedUniversal2023a, + title = {{{CHGNet}} as a Pretrained Universal Neural Network Potential for Charge-Informed Atomistic Modelling}, + author = {Deng, Bowen and Zhong, Peichen and Jun, KyuJung and Riebesell, Janosh and Han, Kevin and Bartel, Christopher J. and Ceder, Gerbrand}, + date = {2023-09}, + journaltitle = {Nature Machine Intelligence}, + shortjournal = {Nat Mach Intell}, + volume = {5}, + number = {9}, + pages = {1031--1041}, + publisher = {{Nature Publishing Group}}, + issn = {2522-5839}, + doi = {10.1038/s42256-023-00716-3}, + url = {https://www.nature.com/articles/s42256-023-00716-3}, + urldate = {2023-11-11}, + abstract = {Large-scale simulations with complex electron interactions remain one of the greatest challenges for atomistic modelling. Although classical force fields often fail to describe the coupling between electronic states and ionic rearrangements, the more accurate ab initio molecular dynamics suffers from computational complexity that prevents long-time and large-scale simulations, which are essential to study technologically relevant phenomena. 
Here we present the Crystal Hamiltonian Graph Neural Network (CHGNet), a graph neural network-based machine-learning interatomic potential (MLIP) that models the universal potential energy surface. CHGNet is pretrained on the energies, forces, stresses and magnetic moments from the Materials Project Trajectory Dataset, which consists of over 10\,years of density functional theory calculations of more than 1.5\,million inorganic structures. The explicit inclusion of magnetic moments enables CHGNet to learn and accurately represent the orbital occupancy of electrons, enhancing its capability to describe both atomic and electronic degrees of freedom. We demonstrate several applications of CHGNet in solid-state materials, including charge-informed molecular dynamics in LixMnO2, the finite temperature phase diagram for LixFePO4 and Li diffusion in garnet conductors. We highlight the significance of charge information for capturing appropriate chemistry and provide insights into ionic systems with additional electronic degrees of freedom that cannot be observed by previous MLIPs.}, + issue = {9}, + langid = {english}, + keywords = {/unread,AML,ase,charge transfer,Database,DFT,electrochemistry,electrostatic interaction,GGA,GGA+U,ionic systems,LAMMPS,library,magnetic moment,magnetism,materials,materials project,ML,MLP,MPNN,periodic table,prediction of energy,prediction of forces,prediction of magnetic moment,prediction of stress,structure relaxation,transition metals,universal potential,VASP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Deng et al_2023_CHGNet as a pretrained universal neural network potential for charge-informed.pdf} +} + @article{dengImageNetLargescaleHierarchical2009, title = {{{ImageNet}}: {{A}} Large-Scale Hierarchical Image Database}, shorttitle = {{{ImageNet}}}, @@ -2740,7 +3532,6 @@ urldate = {2023-02-15}, abstract = {The electronic structure in matter under extreme conditions is a challenging complex system prevalent in astrophysical objects 
and highly relevant for technological applications. We show how machine-learning surrogates in terms of neural networks have a profound impact on the efficient modeling of matter under extreme conditions. We demonstrate the utility of a surrogate model that is trained on \textbackslash emph\{ab initio\} quantum Monte Carlo data for various applications in the emerging field of warm dense matter research.}, pubstate = {preprint}, - keywords = {/unread}, file = {/Users/wasmer/Nextcloud/Zotero/Dornheim et al_2021_A Machine-Learning Surrogate Model for ab initio Electronic Correlations at.pdf;/Users/wasmer/Zotero/storage/F4428MJB/2104.html} } @@ -2763,19 +3554,6 @@ file = {/Users/wasmer/Nextcloud/Zotero/Dragoni et al_2018_Achieving DFT accuracy with a machine-learning interatomic potential.pdf;/Users/wasmer/Zotero/storage/H8ISZZP6/PhysRevMaterials.2.html} } -@online{dralBookQuantumChemistry2022, - title = {Book “{{Quantum Chemistry}} in the {{Age}} of {{Machine Learning}}â€}, - author = {Dral, Pavlo}, - date = {2022-09-20T09:27:08+00:00}, - url = {http://dr-dral.com/book-quantum-chemistry-in-the-age-of-machine-learning/}, - urldate = {2023-03-02}, - abstract = {The book “Quantum Chemistry in the Age of Machine Learning†guides aspiring beginners and specialists in this exciting field by covering topics ranging from basic concepts to comprehens…}, - langid = {american}, - organization = {{Dral's Group}}, - keywords = {/unread,AML,book,ML,ML-ESM,prediction of electron density,tutorial,with-code}, - file = {/Users/wasmer/Zotero/storage/ZPMKDHTP/book-quantum-chemistry-in-the-age-of-machine-learning.html} -} - @article{dralMLatomIntegrativePlatform2021, title = {{{MLatom}} 2: {{An Integrative Platform}} for {{Atomistic Machine Learning}}}, shorttitle = {{{MLatom}} 2}, @@ -2833,6 +3611,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Dral_2020_Quantum Chemistry in the Age of Machine Learning.pdf;/Users/wasmer/Zotero/storage/4NY56BJV/acs.jpclett.html} } 
+@book{dralQuantumChemistryAge2022, + title = {Quantum {{Chemistry}} in the {{Age}} of {{Machine Learning}}}, + editor = {Dral, Pavlo O.}, + date = {2022-09-15}, + edition = {1}, + publisher = {{Elsevier}}, + doi = {10.1016/B978-0-323-90049-2.09989-3}, + url = {https://www.sciencedirect.com/science/article/pii/B9780323900492099893}, + urldate = {2023-09-30}, + abstract = {Quantum chemistry is simulating atomistic systems according to the laws of quantum mechanics, and such simulations are essential for our understanding of the world and for technological progress. Machine learning revolutionizes quantum chemistry by increasing simulation speed and accuracy and obtaining new insights. However, for nonspecialists, learning about this vast field is a formidable challenge. Quantum Chemistry in the Age of Machine Learning covers this exciting field in detail, ranging from basic concepts to comprehensive methodological details to providing detailed codes and hands-on tutorials. Such an approach helps readers get a quick overview of existing techniques and provides an opportunity to learn the intricacies and inner workings of state-of-the-art methods. The book describes the underlying concepts of machine learning and quantum chemistry, machine learning potentials and learning of other quantum chemical properties, machine learning-improved quantum chemical methods, analysis of Big Data from simulations, and materials design with machine learning. 
Drawing on the expertise of a team of specialist contributors, this book serves as a valuable guide for both aspiring beginners and specialists in this exciting field.}, + isbn = {978-0-323-90049-2}, + langid = {english}, + pagetotal = {698}, + keywords = {active learning,AML,educational,Gaussian process,introduction,kernel methods,learning material,ML,ML-DFA,ML-DFT,ML-ESM,ML-WFT,MLP,NN,prediction of electron density,prediction of energy,prediction of Exc,prediction of ground-state properties,prediction of polarizability,prediction of wavefunction,review,review-of-AML,textbook,tutorial}, + file = {/Users/wasmer/Nextcloud/Zotero/Dral_2022_Quantum Chemistry in the Age of Machine Learning.pdf;/Users/wasmer/Zotero/storage/E24LPQ5A/B9780323900492099893.html} +} + @article{drautzAtomicClusterExpansion2019, title = {Atomic Cluster Expansion for Accurate and Transferable Interatomic Potentials}, author = {Drautz, Ralf}, @@ -2933,6 +3728,20 @@ file = {/Users/wasmer/Nextcloud/Zotero/Draxl_Scheffler_2019_The NOMAD laboratory.pdf} } +@book{dreizlerDensityFunctionalTheory1990, + title = {Density {{Functional Theory}}}, + author = {Dreizler, Reiner M. and Gross, Eberhard K. U.}, + date = {1990}, + publisher = {{Springer}}, + location = {{Berlin, Heidelberg}}, + doi = {10.1007/978-3-642-86105-5}, + url = {http://link.springer.com/10.1007/978-3-642-86105-5}, + urldate = {2023-09-21}, + isbn = {978-3-642-86107-9 978-3-642-86105-5}, + langid = {english}, + file = {/Users/wasmer/Zotero/storage/8AP7L4DE/Dreizler and Gross - 1990 - Density Functional Theory.pdf} +} + @book{dresselhausGroupTheory2007, title = {Group {{Theory}}}, author = {Dresselhaus, Mildred S. 
and Dresselhaus, Gene and Jorio, Ado}, @@ -2968,6 +3777,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Drozdov et al_2015_Conventional superconductivity at 203 kelvin at high pressures in the sulfur.pdf;/Users/wasmer/Zotero/storage/CJIZLLVA/nature14964.html} } +@article{dumazTopicModelingDensity2023, + title = {Topic Modeling in Density Functional Theory on Citations of Condensed Matter Electronic Structure Packages}, + author = {Dumaz, Marie and Romero-Bohórquez, Camila and Adjeroh, Donald and Romero, Aldo H.}, + date = {2023-07-23}, + journaltitle = {Scientific Reports}, + shortjournal = {Sci Rep}, + volume = {13}, + number = {1}, + pages = {11881}, + publisher = {{Nature Publishing Group}}, + issn = {2045-2322}, + doi = {10.1038/s41598-023-38551-6}, + url = {https://www.nature.com/articles/s41598-023-38551-6}, + urldate = {2023-07-24}, + abstract = {With an increasing number of new scientific papers being released, it becomes harder for researchers to be aware of recent articles in their field of study. Accurately classifying papers is a first step in the direction of personalized catering and easy access to research of interest. The field of Density Functional Theory (DFT) in particular is a good example of a methodology used in very different studies, and interconnected disciplines, which has a very strong community publishing many research articles. We devise a new unsupervised method for classifying publications, based on topic modeling, and use a DFT-related selection of documents as a use case. We first create topics from word analysis and clustering of the abstracts from the publications, then attribute each publication/paper to a topic based on word similarity. We then make interesting observations by analyzing connections between the topics and publishers, journals, country or year of publication. 
The proposed approach is general, and can be applied to analyze publication and citation trends in other areas of study, beyond the field of Density Functional Theory.},
In this paper, systematic density functional theory calculations reveal specific material chemistry and growth conditions that determine the defect formation and dopant incorporation in MnBi2Te4 and MnBi4Te7. The large strain induced by the internal heterostructure promotes the formation of large-size-mismatched antisite defects and substitutional dopants. The results here show that the abundance of antisite defects is responsible for the observed n-type metallic conductivity. A Te-rich growth condition is predicted to reduce the bulk free electron density, which is confirmed by experimental synthesis and transport measurements in MnBi2Te4. Furthermore, Na doping is proposed to be an effective acceptor dopant to pin the Fermi level within the bulk band gap to enable the observation of surface quantum transport. The defect engineering and doping strategies proposed here should stimulate further studies for improving synthesis and for manipulating magnetic and topological properties in MnBi2Te4, MnBi4Te7, and related magnetic topological insulators.}, + langid = {english}, + keywords = {/unread,defect engineering,defects,DFT,todo-tagging,topological insulators}, + file = {/Users/wasmer/Nextcloud/Zotero/Du et al_2021_Tuning Fermi Levels in Intrinsic Antiferromagnetic Topological Insulators.pdf} +} + @unpublished{dymLowDimensionalInvariant2022, title = {Low {{Dimensional Invariant Embeddings}} for {{Universal Geometric Learning}}}, author = {Dym, Nadav and Gortler, Steven J.}, @@ -3054,6 +3902,114 @@ file = {/Users/wasmer/Nextcloud/Zotero/Eckhoff_Behler_2021_High-Dimensional Neural Network Potentials for Magnetic Systems Using.pdf;/Users/wasmer/Zotero/storage/KW8NBSDW/2104.html} } +@article{eisenbachGPUAccelerationLocally2017, + title = {{{GPU}} Acceleration of the {{Locally Selfconsistent Multiple Scattering}} Code for First Principles Calculation of the Ground State and Statistical Physics of Materials}, + author = {Eisenbach, Markus and Larkin, Jeff and Lutjens, 
Justin and Rennich, Steven and Rogers, James H.}, + date = {2017-02-01}, + journaltitle = {Computer Physics Communications}, + shortjournal = {Computer Physics Communications}, + series = {High {{Performance Computing}} for {{Advanced Modeling}} and {{Simulation}} of {{Materials}}}, + volume = {211}, + pages = {2--7}, + issn = {0010-4655}, + doi = {10.1016/j.cpc.2016.07.013}, + url = {https://www.sciencedirect.com/science/article/pii/S0010465516301953}, + urldate = {2023-09-19}, + abstract = {The Locally Self-consistent Multiple Scattering (LSMS) code solves the first principles Density Functional theory Kohn–Sham equation for a wide range of materials with a special focus on metals, alloys and metallic nano-structures. It has traditionally exhibited near perfect scalability on massively parallel high performance computer architectures. We present our efforts to exploit GPUs to accelerate the LSMS code to enable first principles calculations of O(100,000) atoms and statistical physics sampling of finite temperature properties. We reimplement the scattering matrix calculation for GPUs with a block matrix inversion algorithm that only uses accelerator memory. Using the Cray XK7 system Titan at the Oak Ridge Leadership Computing Facility we achieve a sustained performance of 14.5PFlop/s and a speedup of 8.6 compared to the CPU only code.}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Eisenbach et al_2017_GPU acceleration of the Locally Selfconsistent Multiple Scattering code for.pdf} +} + +@report{eisenbachLSMS2017, + title = {{{LSMS}}}, + author = {Eisenbach, Markus and Li, Ying Wai and Liu, Xianglin and Odbadrakh, Od K. and Pei, Zongrui and Stocks, George M. 
and Yin, Junqi}, + date = {2017-12-01}, + number = {LSMS; 005587WKSTN00}, + institution = {{Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}}, + url = {https://www.osti.gov/biblio/1420087}, + urldate = {2023-09-19}, + abstract = {LSMS is a first principles, Density Functional theory based, electronic structure code targeted mainly at materials applications. LSMS calculates the local spin density approximation to the diagonal part of the electron Green's function. The electron/spin density and energy are easily determined once the Green's function is known. Linear scaling with system size is achieved in the LSMS by using several unique properties of the real space multiple scattering approach to the Green's function.}, + langid = {english}, + keywords = {/unread,DFT,KKR,library,Multiple scattering theory,physics} +} + +@online{eisenbachMachineLearningFirst2022, + title = {Machine {{Learning}} for {{First Principles Calculations}} of {{Material Properties}} for {{Ferromagnetic Materials}}}, + author = {Eisenbach, Markus and Karabin, Mariia and Pasini, Massimiliano Lupo and Yin, Junqi}, + date = {2022-07-29}, + eprint = {2207.10144}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2207.10144}, + url = {http://arxiv.org/abs/2207.10144}, + urldate = {2023-09-19}, + abstract = {The investigation of finite temperature properties using Monte-Carlo (MC) methods requires a large number of evaluations of the system's Hamiltonian to sample the phase space needed to obtain physical observables as function of temperature. DFT calculations can provide accurate evaluations of the energies, but they are too computationally expensive for routine simulations. To circumvent this problem, machine-learning (ML) based surrogate models have been developed and implemented on high-performance computing (HPC) architectures. 
In this paper, we describe two ML methods (linear mixing model and HydraGNN) as surrogates for first principles density functional theory (DFT) calculations with classical MC simulations. These two surrogate models are used to learn the dependence of target physical properties from complex compositions and interactions of their constituents. We present the predictive performance of these two surrogate models with respect to their complexity while avoiding the danger of overfitting the model. An important aspect of our approach is the periodic retraining with newly generated first principles data based on the progressive exploration of the system's phase space by the MC simulation. The numerical results show that HydraGNN model attains superior predictive performance compared to the linear mixing model for magnetic alloy materials.}, + pubstate = {preprint}, + version = {2}, + keywords = {active learning,alloys,AML,binary systems,DFT,Ferromagnetism,GNN,HEA,HydraGNN,linear mixing model,magnetism,MC,ML,ML-DFT,multi-task learning,Multiple scattering theory,prediction of charge transfer,prediction of energy,prediction of magnetic moment,surrogate model}, + file = {/Users/wasmer/Nextcloud/Zotero/Eisenbach et al_2022_Machine Learning for First Principles Calculations of Material Properties for.pdf;/Users/wasmer/Zotero/storage/ZHJES8AM/2207.html} +} + +@inproceedings{eisenbachMachineLearningFirst2022a, + title = {Machine {{Learning}} for~{{First Principles Calculations}} of~{{Material Properties}} for~{{Ferromagnetic Materials}}}, + booktitle = {Accelerating {{Science}} and {{Engineering Discoveries Through Integrated Research Infrastructure}} for {{Experiment}}, {{Big Data}}, {{Modeling}} and {{Simulation}}}, + author = {Eisenbach, Markus and Karabin, Mariia and Lupo Pasini, Massimiliano and Yin, Junqi}, + editor = {Doug, Kothe and Al, Geist and Pophale, Swaroop and Liu, Hong and Parete-Koon, Suzanne}, + date = {2022}, + series = {Communications in {{Computer}} and 
{{Information Science}}}, + pages = {75--86}, + publisher = {{Springer Nature Switzerland}}, + location = {{Cham}}, + doi = {10.1007/978-3-031-23606-8_5}, + abstract = {The investigation of finite temperature properties using Monte-Carlo (MC) methods requires a large number of evaluations of the system’s Hamiltonian to sample the phase space needed to obtain physical observables as function of temperature. DFT calculations can provide accurate evaluations of the energies, but they are too computationally expensive for routine simulations. To circumvent this problem, machine-learning (ML) based surrogate models have been developed and implemented on high-performance computing (HPC) architectures. In this paper, we describe two ML methods (linear mixing model and HydraGNN) as surrogates for first principles density functional theory (DFT) calculations with classical MC simulations. These two surrogate models are used to learn the dependence of target physical properties from complex compositions and interactions of their constituents. We present the predictive performance of these two surrogate models with respect to their complexity while avoiding the danger of overfitting the model. An important aspect of our approach is the periodic retraining with newly generated first principles data based on the progressive exploration of the system’s phase space by the MC simulation. 
The numerical results show that HydraGNN model attains superior predictive performance compared to the linear mixing model for magnetic alloy materials.}, + isbn = {978-3-031-23606-8}, + langid = {english}, + keywords = {AML,binary systems,charge transfer,DFT,Ferromagnetism,GNN,HydraGNN,library,magnetism,ML,ML-DFT,ML-ESM,multi-task learning,ORNL,prediction of charge transfer,prediction of energy,prediction of magnetic moment,PyTorch,surrogate model,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Eisenbach et al_2022_Machine Learning for First Principles Calculations of Material Properties.pdf} +} + +@unpublished{eisenbachMachineLearningSurrogate, + title = {Machine {{Learning Surrogate Models}} to {{Accelerate Monte-Carlo Calculation}}}, + author = {Eisenbach, Markus}, + url = {https://meetings.aps.org/Meeting/MAR19/Session/F21.3}, + urldate = {2023-09-19}, + abstract = {While modern Monte-Carlo algorithms are highly efficient for computational statistical mechanics in many systems, it is desirable for many materials simulations to utilize energies that are evaluated using density functional theory to capture the complex interactions in multicomponent systems. In the past we have performed calculations by combining our LSMS first principles code with Wang-Landau Monte-Carlo calculations. The number of Monte-Carlo steps limits the applicability of this method even on high-performance computer systems. Thus, we are integrating a machine learning derived surrogate model with Monte-Carlo calculations. Here we present our results of deriving surrogate models from total energy calculations that replicate the behavior of first principles calculations of alloy ordering transitions. In addition to evaluating the attainable speedup, we explore strategies for reducing the dimensionality of the surrogate model as well as the impact of the model on the accuracy of the Monte-Carlo results. 
*This work is supported in part by the Office of Science of the Department of Energy and by the LDRD Program of Oak Ridge National Laboratory. It used resources of the Oak Ridge Leadership Computing Facility, supported by the Office of Science of the U.S. Department of Energy.}, + eventtitle = {{{APS March Meeting}} 2019}, + venue = {{Boston}}, + keywords = {/unread,todo-tagging}, + annotation = {Authors: + +Markus Eisenbach (Oak Ridge National Laboratory) + +Jiaxin Zhang (Oak Ridge National Laboratory) + +Zongrui Pei (Oak Ridge National Laboratory) + +Massimiliano Lupo Pasini (Oak Ridge National Laboratory) + +Ying Wai Li (Oak Ridge National Laboratory) + +Junqi Yin (Oak Ridge National Laboratory)}, + file = {/Users/wasmer/Zotero/storage/729Z2H6S/F21.html} +} + +@inproceedings{eisenbachScalableMethodInitio2009, + title = {A Scalable Method for Ab Initio Computation of Free Energies in Nanoscale Systems}, + booktitle = {Proceedings of the {{Conference}} on {{High Performance Computing Networking}}, {{Storage}} and {{Analysis}}}, + author = {Eisenbach, M. and Zhou, C.-G. and Nicholson, D. M. and Brown, G. and Larkin, J. and Schulthess, T. C.}, + date = {2009-11-14}, + series = {{{SC}} '09}, + pages = {1--8}, + publisher = {{Association for Computing Machinery}}, + location = {{New York, NY, USA}}, + doi = {10.1145/1654059.1654125}, + url = {https://doi.org/10.1145/1654059.1654125}, + urldate = {2023-09-19}, + abstract = {Calculating the thermodynamics of nanoscale systems presents challenges in the simultaneous treatment of the electronic structure, which determines the interactions between atoms, and the statistical fluctuations that become ever more important at shorter length scales. 
Here we present a highly scalable method that combines ab initio electronic structure techniques, we use the Locally Self-Consitent Multiple Scattering (LSMS) technique, with the Wang-Landau (WL) algorithm to compute free energies and other thermodynamic properties of nanoscale systems. The combined WL-LSMS code is targeted to the study of nanomagnetic systems that have anywhere from about one hundred to a few thousand atoms. The code scales very well on the Cray XT5 system at ORNL, sustaining 1.03 Petaflop/s in double precision on 147,464 cores.}, + isbn = {978-1-60558-744-8}, + keywords = {/unread,todo-tagging} +} + @article{ellisAcceleratingFinitetemperatureKohnSham2021, title = {Accelerating Finite-Temperature {{Kohn-Sham}} Density Functional Theory with Deep Neural Networks}, author = {Ellis, J. A. and Fiedler, L. and Popoola, G. A. and Modine, N. A. and Stephens, J. A. and Thompson, A. P. and Cangi, A. and Rajamanickam, S.}, @@ -3068,10 +4024,24 @@ url = {https://link.aps.org/doi/10.1103/PhysRevB.104.035120}, urldate = {2021-12-05}, abstract = {We present a numerical modeling workflow based on machine learning which reproduces the total energies produced by Kohn-Sham density functional theory (DFT) at finite electronic temperature to within chemical accuracy at negligible computational cost. Based on deep neural networks, our workflow yields the local density of states (LDOS) for a given atomic configuration. From the LDOS, spatially resolved, energy-resolved, and integrated quantities can be calculated, including the DFT total free energy, which serves as the Born-Oppenheimer potential energy surface for the atoms. We demonstrate the efficacy of this approach for both solid and liquid metals and compare results between independent and unified machine-learning models for solid and liquid aluminum. 
Our machine-learning density functional theory framework opens up the path towards multiscale materials modeling for matter under ambient and extreme conditions at a computational scale and cost that is unattainable with current algorithms.}, - keywords = {DFT,finite-temperature DFT,LAMMPS,library,MALA,ML,ML-DFT,ML-ESM,prediction of electron density,prediction of LDOS,quantum,Quantum ESPRESSO,SNAP,VASP,with-code}, + keywords = {AML,descriptors,DFT,FCNN,finite-temperature DFT,LAMMPS,library,MALA,ML,ML-DFT,ML-ESM,NN,original publication,prediction of electron density,prediction of energy,prediction of LDOS,Quantum ESPRESSO,SNAP,VASP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Ellis et al_2021_Accelerating finite-temperature Kohn-Sham density functional theory with deep.pdf;/Users/wasmer/Zotero/storage/AS2E35V9/PhysRevB.104.html} } +@report{erwinWhitePaperLeveraging2023, + type = {White Paper}, + title = {White {{Paper}} - {{Leveraging Physics-Based Models}} and {{AI}} for New {{Material Development}}}, + author = {Erwin, William and Edkins, Stephen}, + date = {2023-06}, + institution = {{Citrine Informatics}}, + url = {https://citrine.io/success/white-papers/white-paper-leveraging-physics-based-models-and-ai-for-new-material-development/}, + urldate = {2023-08-19}, + abstract = {Physics-based models (PBMs) are used extensively in materials research, primarily as a proxy for more time and cost intensive experimental material production and characterization.}, + langid = {american}, + keywords = {AML,case study,Citrine Informatics,customer story,descriptors,DFT,industrial application,materials,ML,ML-DFT,multi-fidelity,sequential learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Erwin_Edkins_2023_White Paper - Leveraging Physics-Based Models and AI for new Material.pdf;/Users/wasmer/Zotero/storage/QJBC8LD2/white-paper-leveraging-physics-based-models-and-ai-for-new-material-development.html} +} + @article{evansGroupTheory2004, title = {Group {{Theory}}}, author 
= {Evans, Tim S. and Vvedensky, Dimitri D.}, @@ -3172,6 +4142,100 @@ file = {/Users/wasmer/Nextcloud/Zotero/Faraji et al_2017_High accuracy and transferability of a neural network potential through charge.pdf;/Users/wasmer/Zotero/storage/GU7MU2BP/PhysRevB.95.html} } +@article{faulknerCalculatingPropertiesCoherentpotential1980, + title = {Calculating Properties with the Coherent-Potential Approximation}, + author = {Faulkner, J. S. and Stocks, G. M.}, + date = {1980-04-15}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {21}, + number = {8}, + pages = {3222--3244}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevB.21.3222}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.21.3222}, + urldate = {2023-09-19}, + abstract = {It is demonstrated that the expression that has hitherto been used for calculating the Bloch spectral-density function AB(E,→k) in the Korringa-Kohn-Rostoker coherent-potential-approximation theory of alloys leads to manifestly unphysical results. No manipulation of the expression can eliminate this behavior. We develop an averaged Green's-function formulation and from it derive a new expression for AB(E,→k) which does not contain unphysical features. The earlier expression for AB(E,→k) was suggested as plausible on the basis that it is a spectral decomposition of the Lloyd formula. Expressions for many other properties of alloys have been obtained by manipulations of the Lloyd formula, and it is now clear that all such expressions must be considered suspect. It is shown by numerical and algebraic comparisons that some of the expressions obtained in this way are equivalent to the ones obtained from a Green's function, while others are not. In addition to studying these questions, the averaged Green's-function formulation developed in this paper is shown to furnish an interesting new way to approach many problems in alloy theory. 
The method is described in such a way that the aspects of the formulation that arise from the single-site approximation can be distinguished from those that depend on a specific choice for the effective scatterer.}, + keywords = {CPA,KKR}, + file = {/Users/wasmer/Nextcloud/Zotero/Faulkner_Stocks_1980_Calculating properties with the coherent-potential approximation.pdf;/Users/wasmer/Zotero/storage/SM5WK843/PhysRevB.21.html} +} + +@book{faulknerMultipleScatteringTheory2018, + title = {Multiple {{Scattering Theory}}: {{Electronic}} Structure of Solids}, + shorttitle = {Multiple {{Scattering Theory}}}, + author = {Faulkner, J. S. and Stocks, G. Malcolm and Wang, Yang}, + date = {2018-12-01}, + publisher = {{IOP Publishing}}, + url = {https://iopscience.iop.org/book/mono/978-0-7503-1490-9}, + urldate = {2023-09-19}, + abstract = {{$<$}p{$>$}In 1947, it was discovered that multiple scattering theory can be used to solve the Schrödinger equation for the stationary states of electrons in a solid. Written by experts in the field, Dr J S Faulkner, G M Stocks, and Yang Wang, this book collates the results of numerous studies in the field of multiple scattering theory and provides a comprehensive, systematic approach to MSTs.{$<$}/p{$><$}p{$>$}For many scientists, students and engineers working with multiple scattering programmes, this will be a useful guide that help with the existing knowledge of MST as well as understanding its future implications. 
For those interested in learning about multiple scattering theory, this book will serve as an introduction for those wanting to use MST for their own calculations.},
Qi and Tran, Huan and Chen, Binghong and Toland, Aubrey and Ramprasad, Rampi and Zhang, Chao}, + date = {2023-08-23}, + eprint = {2308.14759}, + eprinttype = {arxiv}, + eprintclass = {physics, q-bio}, + doi = {10.48550/arXiv.2308.14759}, + url = {http://arxiv.org/abs/2308.14759}, + urldate = {2023-09-22}, + abstract = {Recent works have shown the promise of learning pre-trained models for 3D molecular representation. However, existing pre-training models focus predominantly on equilibrium data and largely overlook off-equilibrium conformations. It is challenging to extend these methods to off-equilibrium data because their training objective relies on assumptions of conformations being the local energy minima. We address this gap by proposing a force-centric pretraining model for 3D molecular conformations covering both equilibrium and off-equilibrium data. For off-equilibrium data, our model learns directly from their atomic forces. For equilibrium data, we introduce zero-force regularization and forced-based denoising techniques to approximate near-equilibrium forces. We obtain a unified pre-trained model for 3D molecular representation with over 15 million diverse conformations. Experiments show that, with our pre-training objective, we increase forces accuracy by around 3 times compared to the un-pre-trained Equivariant Transformer model. By incorporating regularizations on equilibrium data, we solved the problem of unstable MD simulations in vanilla Equivariant Transformers, achieving state-of-the-art simulation performance with 2.45 times faster inference time than NequIP. As a powerful molecular encoder, our pre-trained model achieves on-par performance with state-of-the-art property prediction tasks.}, + pubstate = {preprint}, + keywords = {AML,ANI1-x,fine-tuning,finetuning,MD,MD17,ML,MLP,molecules,NequIP,out-of-equilibrium,pretrained models,QM9,representation learning}, + file = {/Users/wasmer/Zotero/storage/IAZ2DY2B/Feng et al. 
- 2023 - May the Force be with You Unified Force-Centric P.pdf;/Users/wasmer/Zotero/storage/UG7IJTUA/2308.html} +} + +@online{fengPolyGETAcceleratingPolymer2023, + title = {{{PolyGET}}: {{Accelerating Polymer Simulations}} by {{Accurate}} and {{Generalizable Forcefield}} with {{Equivariant Transformer}}}, + shorttitle = {{{PolyGET}}}, + author = {Feng, Rui and Tran, Huan and Toland, Aubrey and Chen, Binghong and Zhu, Qi and Ramprasad, Rampi and Zhang, Chao}, + date = {2023-09-01}, + eprint = {2309.00585}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2309.00585}, + url = {http://arxiv.org/abs/2309.00585}, + urldate = {2023-09-22}, + abstract = {Polymer simulation with both accuracy and efficiency is a challenging task. Machine learning (ML) forcefields have been developed to achieve both the accuracy of ab initio methods and the efficiency of empirical force fields. However, existing ML force fields are usually limited to single-molecule settings, and their simulations are not robust enough. In this paper, we present PolyGET, a new framework for Polymer Forcefields with Generalizable Equivariant Transformers. PolyGET is designed to capture complex quantum interactions between atoms and generalize across various polymer families, using a deep learning model called Equivariant Transformers. We propose a new training paradigm that focuses exclusively on optimizing forces, which is different from existing methods that jointly optimize forces and energy. This simple force-centric objective function avoids competing objectives between energy and forces, thereby allowing for learning a unified forcefield ML model over different polymer families. We evaluated PolyGET on a large-scale dataset of 24 distinct polymer types and demonstrated state-of-the-art performance in force accuracy and robust MD simulations. 
Furthermore, PolyGET can simulate large polymers with high fidelity to the reference ab initio DFT method while being able to generalize to unseen polymers.}, + pubstate = {preprint}, + keywords = {AML,benchmarking,EGNN,equivariant,MD,ML,ML-FF,MLP,NequIP,PolyGET,polymers,SchNet,TorchMDNet,transformer}, + file = {/Users/wasmer/Zotero/storage/KZMGIR4P/Feng et al. - 2023 - PolyGET Accelerating Polymer Simulations by Accur.pdf;/Users/wasmer/Zotero/storage/AYKT7AGJ/2309.html} +} + @article{fernandez-delgadoWeNeedHundreds2014, title = {Do We {{Need Hundreds}} of {{Classifiers}} to {{Solve Real World Classification Problems}}?}, author = {Fernández-Delgado, Manuel and Cernadas, Eva and Barro, Senén and Amorim, Dinani}, @@ -3213,7 +4277,7 @@ urldate = {2023-02-15}, abstract = {We introduce a practical hybrid approach that combines orbital-free density functional theory (DFT) with Kohn-Sham DFT for speeding up first-principles molecular dynamics simulations. Equilibrated ionic configurations are generated using orbital-free DFT for subsequent Kohn-Sham DFT molecular dynamics. This leads to a massive reduction of the simulation time without any sacrifice in accuracy. We assess this finding across systems of different sizes and temperature, up to the warm dense matter regime. To that end, we use the cosine distance between the time series of radial distribution functions representing the ionic configurations. Likewise, we show that the equilibrated ionic configurations from this hybrid approach significantly enhance the accuracy of machine-learning models that replace Kohn-Sham DFT. Our hybrid scheme enables systematic first-principles simulations of warm dense matter that are otherwise hampered by the large numbers of atoms and the prevalent high temperatures. 
Moreover, our finding provides an additional motivation for developing kinetic and noninteracting free energy functionals for orbital-free DFT.}, pubstate = {preprint}, - keywords = {AIMD,DFT,MD,OF-DFT}, + keywords = {AIMD,AML,DFT,finite-temperature DFT,LDA,library,MALA,MD,ML,ML-DFT,ML-ESM,OF-DFT,Quantum ESPRESSO,single-element,VASP,warm dense matter,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2022_Accelerating Equilibration in First-Principles Molecular Dynamics with.pdf;/Users/wasmer/Zotero/storage/TA7XVJUP/2206.html} } @@ -3277,10 +4341,30 @@ urldate = {2023-06-13}, abstract = {We introduce machine learning (ML) models that predict the electronic structure of materials across a wide temperature range. Our models employ neural networks and are trained on density functional theory (DFT) data. Unlike other ML models that use DFT data, our models directly predict the local density of states (LDOS) of the electronic structure. This provides several advantages, including access to multiple observables such as the electronic density and electronic total free energy. Moreover, our models account for both the electronic and ionic temperatures independently, making them ideal for applications like laser-heating of matter. We validate the efficacy of our LDOS-based models on a metallic test system. They accurately capture energetic effects induced by variations in ionic and electronic temperatures over a broad temperature range, even when trained on a subset of these temperatures. 
These findings open up exciting opportunities for investigating the electronic structure of materials under both ambient and extreme conditions.},
   pubstate = {preprint},
-  keywords = {aluminium,AML,DFT,finite-temperature DFT,library,MALA,Metals and alloys,ML,ML-DFT,ML-ESM,model reporting,PAW,PBE,prediction of electron density,prediction of LDOS,prediction of total energy,Quantum ESPRESSO,transfer learning,VASP,warm dense matter,with-code},
+  keywords = {aluminium,AML,descriptors,DFT,finite-temperature DFT,library,MALA,Metals and alloys,ML,ML-DFT,ML-ESM,model reporting,PAW,PBE,prediction of electron density,prediction of LDOS,prediction of total energy,Quantum ESPRESSO,transfer learning,VASP,warm dense matter,with-code},
   file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2023_Machine learning the electronic structure of matter across temperatures.pdf;/Users/wasmer/Zotero/storage/YX8Z9U2B/2306.html}
 }
 
+@incollection{fiedlerMachineLearningStaticDynamic2023,
+  title = {Machine-{{Learning}} for {{Static}} and~{{Dynamic Electronic Structure~Theory}}},
+  booktitle = {Machine {{Learning}} in {{Molecular Sciences}}},
+  author = {Fiedler, Lenz and Shah, Karan and Cangi, Attila},
+  editor = {Qu, Chen and Liu, Hanchao},
+  date = {2023-10-02},
+  series = {Challenges and {{Advances}} in {{Computational Chemistry}} and {{Physics}}},
+  pages = {113--160},
+  publisher = {{Springer International Publishing}},
+  location = {{Cham}},
+  doi = {10.1007/978-3-031-37196-7_5},
+  url = {https://doi.org/10.1007/978-3-031-37196-7_5},
+  urldate = {2023-10-06},
+  abstract = {Machine learning has emerged as a powerful technique for processing large and complex datasets. Recently it has been utilized for both improving the accuracy and accelerating the computational speed of electronic structure theory. In this chapter, we provide the theoretical background of both density functional theory (DFT), the most widely used electronic structure method, and machine learning on a generally accessible level. We provide a brief overview of the most impactful results in recent times. We, further, showcase how machine learning is used to advance static and dynamic electronic structure calculations with concrete examples. This chapter highlights that fusing concepts of machine learning and density functional theory (DFT) holds the promise to greatly advance electronic structure calculations enabling unprecedented applications for in-silico materials discovery and the search for novel chemical reaction pathways.},
+  isbn = {978-3-031-37196-7},
+  langid = {english},
+  keywords = {AML,educational,finite-temperature,finite-temperature DFT,introduction,MALA,ML,ML-DFT,ML-ESM,PINN,review,TDDFT,tutorial},
+  file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2023_Machine-Learning for Static and Dynamic Electronic Structure Theory.pdf}
+}
+
 @online{fiedlerPredictingElectronicStructures2022,
   title = {Predicting electronic structures at any length scale with machine learning},
   author = {Fiedler, Lenz and Modine, Normand A. and Schmerler, Steve and Vogel, Dayton J. and Popoola, Gabriel A. and Thompson, Aidan P. and Rajamanickam, Sivasankaran and Cangi, Attila},
@@ -3293,7 +4377,7 @@
   urldate = {2023-02-15},
   abstract = {The properties of electrons in matter are of fundamental importance. They give rise to virtually all molecular and material properties and determine the physics at play in objects ranging from semiconductor devices to the interior of giant gas planets. Modeling and simulation of such diverse applications rely primarily on density functional theory (DFT), which has become the principal method for predicting the electronic structure of matter.
While DFT calculations have proven to be very useful to the point of being recognized with a Nobel prize in 1998, their computational scaling limits them to small systems. We have developed a machine learning framework for predicting the electronic structure on any length scale. It shows up to three orders of magnitude speedup on systems where DFT is tractable and, more importantly, enables predictions on scales where DFT calculations are infeasible. Our work demonstrates how machine learning circumvents a long-standing computational bottleneck and advances science to frontiers intractable with any current solutions. This unprecedented modeling capability opens up an inexhaustible range of applications in astrophysics, novel materials discovery, and energy solutions for a sustainable future.}, pubstate = {preprint}, - keywords = {DFT,finite-temperature DFT,LAMMPS,library,MALA,ML,ML-DFT,ML-ESM,prediction of electron density,prediction of LDOS,quantum,Quantum ESPRESSO,SNAP,VASP,with-code}, + keywords = {AIMD,AML,bispectrum,defect 2D,defects,descriptors,DFT,disordered,finite-temperature DFT,grid-based descriptors,LAMMPS,library,linear scaling,linear-scaling DFT,MALA,ML,ML-DFT,ML-ESM,prediction of electron density,prediction of energy,prediction of LDOS,Quantum ESPRESSO,scaling,VASP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2022_Predicting electronic structures at any length scale with machine learning.pdf;/Users/wasmer/Zotero/storage/9AYDDQ8T/2210.html} } @@ -3314,14 +4398,14 @@ abstract = {The properties of electrons in matter are of fundamental importance. They give rise to virtually all material properties and determine the physics at play in objects ranging from semiconductor devices to the interior of giant gas planets. Modeling and simulation of such diverse applications rely primarily on density functional theory (DFT), which has become the principal method for predicting the electronic structure of matter. 
While DFT calculations have proven to be very useful, their computational scaling limits them to small systems. We have developed a machine learning framework for predicting the electronic structure on any length scale. It shows up to three orders of magnitude speedup on systems where DFT is tractable and, more importantly, enables predictions on scales where DFT calculations are infeasible. Our work demonstrates how machine learning circumvents a long-standing computational bottleneck and advances materials science to frontiers intractable with any current solutions.}, issue = {1}, langid = {english}, - keywords = {DFT,Electronic properties and materials,Electronic structure,finite-temperature DFT,LAMMPS,library,MALA,ML,ML-DFT,ML-ESM,prediction of electron density,prediction of LDOS,quantum,Quantum ESPRESSO,SNAP,VASP,with-code}, + keywords = {AIMD,AML,bispectrum,defect 2D,defects,descriptors,DFT,disordered,finite-temperature DFT,grid-based descriptors,LAMMPS,library,linear scaling,linear-scaling DFT,MALA,ML,ML-DFT,ML-ESM,prediction of electron density,prediction of energy,prediction of LDOS,Quantum ESPRESSO,scaling,VASP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2023_Predicting electronic structures at any length scale with machine learning.pdf} } @article{fiedlerTrainingfreeHyperparameterOptimization2022, title = {Training-Free Hyperparameter Optimization of Neural Networks for Electronic Structures in Matter}, author = {Fiedler, Lenz and Hoffmann, Nils and Mohammed, Parvez and Popoola, Gabriel A. and Yovell, Tamar and Oles, Vladyslav and Ellis, J. Austin and Rajamanickam, Siva and Cangi, Attila}, - date = {2022-12-01}, + date = {2022-08-08}, journaltitle = {Machine Learning: Science and Technology}, shortjournal = {Mach. Learn.: Sci. 
Technol.}, volume = {3}, @@ -3335,10 +4419,46 @@ url = {http://arxiv.org/abs/2202.09186}, urldate = {2023-02-15}, abstract = {A myriad of phenomena in materials science and chemistry rely on quantum-level simulations of the electronic structure in matter. While moving to larger length and time scales has been a pressing issue for decades, such large-scale electronic structure calculations are still challenging despite modern software approaches and advances in high-performance computing. The silver lining in this regard is the use of machine learning to accelerate electronic structure calculations -- this line of research has recently gained growing attention. The grand challenge therein is finding a suitable machine-learning model during a process called hyperparameter optimization. This, however, causes a massive computational overhead in addition to that of data generation. We accelerate the construction of neural network models by roughly two orders of magnitude by circumventing excessive training during the hyperparameter optimization phase. We demonstrate our workflow for Kohn-Sham density functional theory, the most popular computational method in materials science and chemistry.}, - keywords = {\_tablet,AML,hyperparameters,hyperparameters optimization,MALA,ML,ML-DFT,Optuna,prediction of electron density,prediction of LDOS}, + keywords = {\_tablet,AML,Deep learning,hyperparameters,hyperparameters optimization,library,MALA,ML,ML-DFT,ML-ESM,Optuna,prediction of electron density,prediction of LDOS,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2022_Training-free hyperparameter optimization of neural networks for electronic.pdf;/Users/wasmer/Zotero/storage/6BKNM2VX/2202.html} } +@article{fiedlerTrainingfreeHyperparameterOptimization2022a, + title = {Training-Free Hyperparameter Optimization of Neural Networks for Electronic Structures in Matter}, + author = {Fiedler, Lenz and Hoffmann, Nils and Mohammed, Parvez and Popoola, Gabriel A. 
and Yovell, Tamar and Oles, Vladyslav and Ellis, J. Austin and Rajamanickam, Sivasankaran and Cangi, Attila}, + date = {2022-10-28}, + journaltitle = {Machine Learning: Science and Technology}, + shortjournal = {Mach. Learn.: Sci. Technol.}, + volume = {3}, + number = {4}, + pages = {045008}, + publisher = {{IOP Publishing}}, + issn = {2632-2153}, + doi = {10.1088/2632-2153/ac9956}, + url = {https://dx.doi.org/10.1088/2632-2153/ac9956}, + urldate = {2023-10-04}, + abstract = {A myriad of phenomena in materials science and chemistry rely on quantum-level simulations of the electronic structure in matter. While moving to larger length and time scales has been a pressing issue for decades, such large-scale electronic structure calculations are still challenging despite modern software approaches and advances in high-performance computing. The silver lining in this regard is the use of machine learning to accelerate electronic structure calculations—this line of research has recently gained growing attention. The grand challenge therein is finding a suitable machine-learning model during a process called hyperparameter optimization. This, however, causes a massive computational overhead in addition to that of data generation. We accelerate the construction of neural network models by roughly two orders of magnitude by circumventing excessive training during the hyperparameter optimization phase. 
We demonstrate our workflow for Kohn–Sham density functional theory, the most popular computational method in materials science and chemistry.}, + langid = {english}, + keywords = {AML,Deep learning,hyperparameters,hyperparameters optimization,library,MALA,ML,ML-DFT,ML-ESM,Optuna,prediction of electron density,prediction of LDOS,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Fiedler et al_2022_Training-free hyperparameter optimization of neural networks for electronic2.pdf} +} + +@online{finziPracticalMethodConstructing2021, + title = {A {{Practical Method}} for {{Constructing Equivariant Multilayer Perceptrons}} for {{Arbitrary Matrix Groups}}}, + author = {Finzi, Marc and Welling, Max and Wilson, Andrew Gordon}, + date = {2021-04-19}, + eprint = {2104.09459}, + eprinttype = {arxiv}, + eprintclass = {cs, math, stat}, + doi = {10.48550/arXiv.2104.09459}, + url = {http://arxiv.org/abs/2104.09459}, + urldate = {2023-08-22}, + abstract = {Symmetries and equivariance are fundamental to the generalization of neural networks on domains such as images, graphs, and point clouds. Existing work has primarily focused on a small number of groups, such as the translation, rotation, and permutation groups. In this work we provide a completely general algorithm for solving for the equivariant layers of matrix groups. In addition to recovering solutions from other works as special cases, we construct multilayer perceptrons equivariant to multiple groups that have never been tackled before, including \$\textbackslash mathrm\{O\}(1,3)\$, \$\textbackslash mathrm\{O\}(5)\$, \$\textbackslash mathrm\{Sp\}(n)\$, and the Rubik's cube group. Our approach outperforms non-equivariant baselines, with applications to particle physics and dynamical systems. 
We release our software library to enable researchers to construct equivariant layers for arbitrary matrix groups.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Finzi et al_2021_A Practical Method for Constructing Equivariant Multilayer Perceptrons for.pdf;/Users/wasmer/Zotero/storage/CFQ89M8L/2104.html} +} + @article{flores-livasPredictionHotSuperconductivity2019, title = {A {{Prediction}} for “{{Hot}}†{{Superconductivity}}}, author = {Flores-Livas, José A. and Arita, Ryotaro}, @@ -3478,6 +4598,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Fraux et al_2020_Chemiscope.pdf;/Users/wasmer/Zotero/storage/TCQI9XE2/joss.html} } +@article{freitagRealClimateTransformative2021, + title = {The Real Climate and Transformative Impact of {{ICT}}: {{A}} Critique of Estimates, Trends, and Regulations}, + shorttitle = {The Real Climate and Transformative Impact of {{ICT}}}, + author = {Freitag, Charlotte and Berners-Lee, Mike and Widdicks, Kelly and Knowles, Bran and Blair, Gordon S. and Friday, Adrian}, + date = {2021-09-10}, + journaltitle = {Patterns}, + shortjournal = {Patterns}, + volume = {2}, + number = {9}, + pages = {100340}, + issn = {2666-3899}, + doi = {10.1016/j.patter.2021.100340}, + url = {https://www.sciencedirect.com/science/article/pii/S2666389921001884}, + urldate = {2023-09-19}, + abstract = {In this paper, we critique ICT's current and projected climate impacts. Peer-reviewed studies estimate ICT's current share of global greenhouse gas (GHG) emissions at 1.8\%–2.8\% of global GHG emissions; adjusting for truncation of supply chain pathways, we find that this share could actually be between 2.1\% and 3.9\%. For ICT's future emissions, we explore assumptions underlying analysts' projections to understand the reasons for their variability. All analysts agree that ICT emissions will not reduce without major concerted efforts involving broad political and industrial action. 
We provide three reasons to believe ICT emissions are going to increase barring intervention and find that not all carbon pledges in the ICT sector are ambitious enough to meet climate targets. We explore the underdevelopment of policy mechanisms for enforcing sector-wide compliance, and contend that, without a global carbon constraint, a new regulatory framework is required to keep the ICT sector's footprint aligned with the Paris Agreement.}, + keywords = {/unread,ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,ICT sector,low-power electronics,world energy consumption}, + file = {/Users/wasmer/Nextcloud/Zotero/Freitag et al_2021_The real climate and transformative impact of ICT.pdf;/Users/wasmer/Zotero/storage/3IPQYR9I/S2666389921001884.html} +} + @article{freyMachineLearningEnabledDesign2020, title = {Machine {{Learning-Enabled Design}} of {{Point Defects}} in {{2D Materials}} for {{Quantum}} and {{Neuromorphic Information Processing}}}, author = {Frey, Nathan C. 
and Akinwande, Deji and Jariwala, Deep and Shenoy, Vivek B.}, @@ -3521,7 +4660,7 @@ pages = {253--305}, doi = {10.1103/RevModPhys.86.253}, keywords = {basics,defects,DFT,impurity embedding,MPI Eisenforschung,review}, - file = {/Users/wasmer/Zotero/storage/26HPBYJN/RevModPhys.86.html} + file = {/Users/wasmer/Nextcloud/Zotero/Freysoldt_2014_First-principles calculations for point defects in solids.pdf;/Users/wasmer/Zotero/storage/26HPBYJN/RevModPhys.86.html} } @article{freysoldtLimitationsEmpiricalSupercell2022, @@ -3584,6 +4723,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Frolov_2021_Quantum computing’s reproducibility crisis.pdf;/Users/wasmer/Zotero/storage/CLEGVGB5/d41586-021-00954-8.html} } +@online{frolovWeCannotBelieve2022, + title = {We Cannot Believe We Overlooked These {{Majorana}} Discoveries}, + author = {Frolov, Sergey and Mourik, Vincent}, + date = {2022-03-31}, + eprint = {2203.17060}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:quant-ph}, + doi = {10.48550/arXiv.2203.17060}, + url = {http://arxiv.org/abs/2203.17060}, + urldate = {2023-09-18}, + abstract = {In 2011-2012 we performed experiments on hybrid superconductor-semiconductor nanowire devices which yielded signatures of Majorana fermions based on zero-bias peaks in tunneling measurements. The research field that grew out of those findings and other contemporary works has advanced significantly, and a lot of new knowledge and insights were gained. However, key smoking gun evidence of Majorana is still lacking. In this paper, we report that while reviewing our old data recently, armed with a decade of knowledge, we realized that back in 2012 our results contained two breakthrough Majorana discoveries. Specifically, we have observed quantized zero-bias peaks, the hallmark of ideal Majorana states. 
Furthermore, we have observed the closing and re-opening of the induced gap perfectly correlated with the emergence of the zero-bias peak - clear evidence of the topological quantum phase superconducting transition. These insights should pave the way to topological Majorana qubits, and you should also check supplementary information for important disclosures.}, + pubstate = {preprint}, + keywords = {/unread,experimental,failure,FZJ,Majorana,MZM,PGI,physics,superconductor,topological,Topological Superconductor}, + file = {/Users/wasmer/Nextcloud/Zotero/Frolov_Mourik_2022_We cannot believe we overlooked these Majorana discoveries.pdf;/Users/wasmer/Zotero/storage/IR3K8NZ9/2203.html} +} + @online{fuchsSETransformers3D2020, title = {{{SE}}(3)-{{Transformers}}: {{3D Roto-Translation Equivariant Attention Networks}}}, shorttitle = {{{SE}}(3)-{{Transformers}}}, @@ -3597,7 +4752,7 @@ urldate = {2022-10-03}, abstract = {We introduce the SE(3)-Transformer, a variant of the self-attention module for 3D point clouds and graphs, which is equivariant under continuous 3D roto-translations. Equivariance is important to ensure stable and predictable performance in the presence of nuisance transformations of the data input. A positive corollary of equivariance is increased weight-tying within the model. The SE(3)-Transformer leverages the benefits of self-attention to operate on large point clouds and graphs with varying number of points, while guaranteeing SE(3)-equivariance for robustness. We evaluate our model on a toy N-body particle simulation dataset, showcasing the robustness of the predictions under rotations of the input. We further achieve competitive performance on two real-world datasets, ScanObjectNN and QM9. 
In all cases, our model outperforms a strong, non-equivariant attention baseline and an equivariant model without attention.}, pubstate = {preprint}, - keywords = {equivariant,GCN,GNN,library,ML,QM9,SchNet,SE(3),self-attention,transformer,with-code}, + keywords = {equivariant,GCN,general ML,GNN,library,ML,QM9,SchNet,SE(3),self-attention,transformer,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Fuchs et al_2020_SE(3)-Transformers.pdf;/Users/wasmer/Zotero/storage/UMVV286P/2006.html} } @@ -3654,6 +4809,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Fung et al_2022_Physically Informed Machine Learning Prediction of Electronic Density of States.pdf;/Users/wasmer/Zotero/storage/MFQH6849/acs.chemmater.html} } +@article{fungPhysicallyInformedMachine2022a, + title = {Physically {{Informed Machine Learning Prediction}} of {{Electronic Density}} of {{States}}}, + author = {Fung, Victor and Ganesh, P. and Sumpter, Bobby G.}, + date = {2022-06-14}, + journaltitle = {Chemistry of Materials}, + shortjournal = {Chem. Mater.}, + volume = {34}, + number = {11}, + pages = {4848--4855}, + publisher = {{American Chemical Society}}, + issn = {0897-4756}, + doi = {10.1021/acs.chemmater.1c04252}, + url = {https://doi.org/10.1021/acs.chemmater.1c04252}, + urldate = {2023-09-23}, + abstract = {The electronic structure of a material, such as its density of states (DOS), provides key insights into its physical and functional properties and serves as a valuable source of high-quality features for many materials screening and discovery workflows. However, the computational cost of calculating the DOS, most commonly with density functional theory (DFT), becomes prohibitive for meeting high-fidelity or high-throughput requirements, necessitating a cheaper but sufficiently accurate surrogate. To fulfill this demand, we develop a general machine learning method based on graph neural networks for predicting the DOS purely from atomic positions, six orders of magnitude faster than DFT. 
This approach can effectively use large materials databases and be applied generally across the entire periodic table to materials classes of arbitrary compositional and structural diversity. We furthermore devise a highly adaptable scheme for physically informed learning which encourages the DOS prediction to favor physically reasonable solutions defined by any set of desired constraints. This functionality provides a means for ensuring that the predicted DOS is reliable enough to be used as an input to downstream materials screening workflows to predict more complex functional properties, which rely on accurate physical features.}, + keywords = {/unread,AML,GNN,materials,ML,ML-DFT,ML-ESM,perovskites,physics-informed ML,prediction of DOS,SOAP}, + file = {/Users/wasmer/Zotero/storage/D55696WS/acs.chemmater.html} +} + @online{galkinGraphML20222021, title = {Graph {{ML}} in 2022: {{Where Are We Now}}?}, shorttitle = {Graph {{ML}} in 2022}, @@ -3897,7 +5071,7 @@ abstract = {Predictive atomistic simulations are increasingly employed for data intensive high throughput studies that take advantage of constantly growing computational resources. To handle the sheer number of individual calculations that are needed in such studies, workflow management packages for atomistic simulations have been developed for a rapidly growing user base. These packages are predominantly designed to handle computationally heavy ab initio calculations, usually with a focus on data provenance and reproducibility. However, in related simulation communities, e.g. the developers of machine learning interatomic potentials (MLIPs), the computational requirements are somewhat different: the types, sizes, and numbers of computational tasks are more diverse, and therefore require additional ways of parallelization and local or remote execution for optimal efficiency. 
In this work, we present the atomistic simulation and MLIP fitting workflow management package wfl and Python remote execution package ExPyRe to meet these requirements. With wfl and ExPyRe, versatile Atomic Simulation Environment based workflows that perform diverse procedures can be written. This capability is based on a low-level developer-oriented framework, which can be utilized to construct high level functionality for user-friendly programs. Such high level capabilities to automate machine learning interatomic potential fitting procedures are already incorporated in wfl, which we use to showcase its capabilities in this work. We believe that wfl fills an important niche in several growing simulation communities and will aid the development of efficient custom computational tasks.},
   langid = {english},
   organization = {{arXiv.org}},
-  keywords = {/unread},
+  keywords = {todo-tagging},
   file = {/Users/wasmer/Nextcloud/Zotero/GelžinytÄ— et al_2023_wfl Python Toolkit for Creating Machine Learning Interatomic Potentials and.pdf}
 }
 
@@ -3932,6 +5106,22 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Gerard et al_2022_Gold-standard solutions to the Schr-odinger equation using deep learning.pdf;/Users/wasmer/Zotero/storage/DWVRHXZW/2205.html}
 }
 
+@online{gerhorstPhononsDensityFunctionalPerturbation2023,
+  title = {Phonons from {{Density-Functional Perturbation Theory}} Using the {{All-Electron Full-Potential Linearized Augmented Plane-Wave Method FLEUR}}},
+  author = {Gerhorst, Christian-Roman and Neukirchen, Alexander and Klüppelberg, Daniel A. and Bihlmayer, Gustav and Betzinger, Markus and Michalicek, Gregor and Wortmann, Daniel and Blügel, Stefan},
+  date = {2023-09-26},
+  eprint = {2309.14799},
+  eprinttype = {arxiv},
+  eprintclass = {cond-mat},
+  doi = {10.48550/arXiv.2309.14799},
+  url = {http://arxiv.org/abs/2309.14799},
+  urldate = {2023-10-04},
+  abstract = {Phonons are quantized vibrations of a crystal lattice that play a crucial role in understanding many properties of solids. Density functional theory (DFT) provides a state-of-the-art computational approach to lattice vibrations from first-principles. We present a successful software implementation for calculating phonons in the harmonic approximation, employing density-functional perturbation theory (DFPT) within the framework of the full-potential linearized augmented plane-wave (FLAPW) method as implemented in the electronic structure package FLEUR. The implementation, which involves the Sternheimer equation for the linear response of the wave function, charge density, and potential with respect to infinitesimal atomic displacements, as well as the setup of the dynamical matrix, is presented and the specifics due to the muffin-tin sphere centered LAPW basis-set and the all-electron nature are discussed. As a test, we calculate the phonon dispersion of several solids including an insulator, a semiconductor as well as several metals. The latter are comprised of magnetic, simple, and transition metals. The results are validated on the basis of phonon dispersions calculated using the finite displacement approach in conjunction with the FLEUR code and the phonopy package, as well as by some experimental results.
An excellent agreement is obtained.}, + pubstate = {preprint}, + keywords = {DFPT,DFT,DFT theory,FLEUR,FZJ,LAPW,perturbation theory,PGI,PGI-1/IAS-1,phonon,physics,quantum materials}, + file = {/Users/wasmer/Nextcloud/Zotero/Gerhorst et al_2023_Phonons from Density-Functional Perturbation Theory using the All-Electron.pdf;/Users/wasmer/Zotero/storage/93USRLZ6/2309.html} +} + @book{geronHandsonMachineLearning2019, title = {Hands-on Machine Learning with {{Scikit-Learn}}, {{Keras}}, and {{TensorFlow}}: Concepts, Tools, and Techniques to Build Intelligent Systems}, shorttitle = {Hands-on Machine Learning with {{Scikit-Learn}}, {{Keras}}, and {{TensorFlow}}}, @@ -4038,10 +5228,28 @@ urldate = {2022-10-28}, abstract = {In this article we demonstrate the applications of classical and quantum machine learning in quantum transport and spintronics. With the help of a two terminal device with magnetic impurity we show how machine learning algorithms can predict the highly non-linear nature of conductance as well as the non-equilibrium spin response function for any random magnetic configuration. We finally describe the applicability of quantum machine learning which has the capability to handle a significantly large configuration space. Our approach is also applicable for molecular systems. 
These outcomes are crucial in predicting the behaviour of large scale systems where a quantum mechanical calculation is computationally challenging and therefore would play a crucial role in designing nano devices.}, pubstate = {preprint}, - keywords = {\_tablet,ML,PGI-1/IAS-1,QML,QSVM,quantum computing,quantum transport,random forest,rec-by-ghosh,spin dynamics,Spintronics,SVM,tight binding,transport properties}, + keywords = {\_tablet,FZJ,ML,PGI,PGI-1/IAS-1,QML,QSVM,quantum computing,quantum transport,random forest,rec-by-ghosh,spin dynamics,Spintronics,SVM,tight binding,transport properties}, file = {/Users/wasmer/Nextcloud/Zotero/Ghosh_Ghosh_2022_Classical and quantum machine learning applications in spintronics.pdf;/Users/wasmer/Zotero/storage/FEUD8XZQ/2207.html} } +@article{ghoshPerspectiveSpinOrbit2023, + title = {Perspective on Spin–Orbit Torque, Topology, and Reciprocal and Real-Space Spin Textures in Magnetic Materials and Heterostructures}, + author = {Ghosh, Sumit and Rüßmann, Philipp and Mokrousov, Yuriy and Freimuth, Frank and Kosma, Adamantia}, + date = {2023-06-15}, + journaltitle = {Journal of Applied Physics}, + shortjournal = {Journal of Applied Physics}, + volume = {133}, + number = {23}, + pages = {230901}, + issn = {0021-8979}, + doi = {10.1063/5.0149849}, + url = {https://doi.org/10.1063/5.0149849}, + urldate = {2023-08-09}, + abstract = {In this Perspective, we present some important aspects of two fundamental concepts of modern spintronics, namely, spin–orbit torque and topology. Although these two fields emerged separately in condensed matter physics, in spintronics they show a deep connection, which requires further theoretical and experimental investigation. The topological features can arise both from momentum space via the wave functions as well as from real space via complex magnetic configurations. These features manifest themselves as unique aspects of different equilibrium and non-equilibrium properties. 
Physical interactions of such a topological origin can open new possibilities for more efficient mechanisms for manipulating magnetic order with electrical currents, which, in turn, can lead to faster and more efficient spintronics devices.},
+  keywords = {berry curvature,DFT,Dzyaloshinskii–Moriya interaction,FZJ,Hall AHE,Hall effect,Hall THE,impurity embedding,juKKR,Keldysh formalism,KKR,Kubo,Kubo-Bastin formalism,magnetic impurity,magnetic structure,magnetism,non-collinear,perspective,perspective-spintronics,PGI,PGI-1/IAS-1,physics,skyrmions,Spin-orbit effects,spin-orbit torque,spintronics,topological,topological insulator},
+  file = {/Users/wasmer/Nextcloud/Zotero/Ghosh et al_2023_Perspective on spin–orbit torque, topology, and reciprocal and real-space spin.pdf;/Users/wasmer/Zotero/storage/UH6NDHMP/2896791.html}
+}
+
 @article{ghoshShortrangeOrderPhase2022,
   title = {Short-range order and phase stability of {{CrCoNi}} explored with machine learning potentials},
   author = {Ghosh, Sheuly and Sotskov, Vadim and Shapeev, Alexander V. and Neugebauer, Jörg and Körmann, Fritz},
@@ -4199,6 +5407,23 @@
   file = {/Users/wasmer/Zotero/storage/LGPLQHSH/global-mlops-and-ml-tools-landscape.html}
 }
 
+@online{goFirstprinciplesCalculationOrbital2023,
+  title = {First-Principles Calculation of Orbital {{Hall}} Effect by {{Wannier}} Interpolation: {{Role}} of Orbital Dependence of the Anomalous Position},
+  shorttitle = {First-Principles Calculation of Orbital {{Hall}} Effect by {{Wannier}} Interpolation},
+  author = {Go, Dongwook and Lee, Hyun-Woo and Oppeneer, Peter M. and Blügel, Stefan and Mokrousov, Yuriy},
+  date = {2023-09-25},
+  eprint = {2309.13996},
+  eprinttype = {arxiv},
+  eprintclass = {cond-mat},
+  doi = {10.48550/arXiv.2309.13996},
+  url = {http://arxiv.org/abs/2309.13996},
+  urldate = {2023-10-04},
+  abstract = {The position operator in a Bloch representation acquires a gauge correction in the momentum space on top of the canonical position, which is called the anomalous position. We show that the anomalous position is generally orbital-dependent and thus plays a crucial role in the description of the intrinsic orbital Hall effect in terms of Wannier basis. We demonstrate this from the first-principles calculation of orbital Hall conductivities of transition metals by Wannier interpolation. Our results show that consistent treatment of the velocity operator by adding the additional term originating from the anomalous position predicts the orbital Hall conductivities different from those obtained by considering only the group velocity. We find the difference is crucial in several metals. For example, we predict the negative sign of the orbital Hall conductivities for elements in the groups X and XI such as Cu, Ag, Au, and Pd, for which the previous studies predicted the positive sign.
Our work suggests the importance of consistently describing the spatial dependence of basis functions by first-principles methods as it is fundamentally missing in the tight-binding approximation.}, + pubstate = {preprint}, + keywords = {DFT,FLEUR,FZJ,Hall effect,Hall OHE,Hall SHE,orbital angular momentum,PGI,PGI-1/IAS-1,physics,quantum materials,SOC,thin film,transition metals,Wannier}, + file = {/Users/wasmer/Nextcloud/Zotero/Go et al_2023_First-principles calculation of orbital Hall effect by Wannier interpolation.pdf;/Users/wasmer/Zotero/storage/E39Y6NJ8/2309.html} +} + @article{golzeGWCompendiumPractical2019, title = {The {{GW Compendium}}: {{A Practical Guide}} to {{Theoretical Photoemission Spectroscopy}}}, shorttitle = {The {{GW Compendium}}}, @@ -4255,6 +5480,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density2.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density3.pdf;/Users/wasmer/Nextcloud/Zotero/Gong et al_2023_General framework for E(3)-equivariant neural network representation of density4.pdf} } +@book{gonisMultipleScatteringSolids2000, + title = {Multiple {{Scattering}} in {{Solids}}}, + author = {Gonis, Antonios and Butler, William H.}, + editorb = {Berry, R. Stephen and Birman, Joseph L. and Lynn, Jeffrey W. and Silverman, Mark P. and Stanley, H. 
Eugene and Voloshin, Mikhail}, + editorbtype = {redactor}, + date = {2000}, + series = {Graduate {{Texts}} in {{Contemporary Physics}}}, + publisher = {{Springer}}, + location = {{New York, NY}}, + doi = {10.1007/978-1-4612-1290-4}, + url = {https://link.springer.com/10.1007/978-1-4612-1290-4}, + urldate = {2023-09-19}, + isbn = {978-1-4612-7080-5 978-1-4612-1290-4}, + langid = {english}, + keywords = {electricity,electronic structure of materials,Helmholtz equation,mechanics,muffin-tin potentials,Muliple scattering theory,partial waves,scattering theory,space-filling cells}, + file = {/Users/wasmer/Nextcloud/Zotero/Gonis_Butler_2000_Multiple Scattering in Solids.pdf} +} + @unpublished{goodallRapidDiscoveryStable2022, title = {Rapid {{Discovery}} of {{Stable Materials}} by {{Coordinate-free Coarse Graining}}}, author = {Goodall, Rhys E. A. and Parackal, Abhijith S. and Faber, Felix A. and Armiento, Rickard and Lee, Alpha A.}, @@ -4309,6 +5552,22 @@ file = {/Users/wasmer/Zotero/storage/HRL7NEIR/pyguide.html} } +@online{goOrbitalPumpingMagnetization2023, + title = {Orbital {{Pumping}} by {{Magnetization Dynamics}} in {{Ferromagnets}}}, + author = {Go, Dongwook and Ando, Kazuya and Pezo, Armando and Blügel, Stefan and Manchon, Aurélien and Mokrousov, Yuriy}, + date = {2023-09-26}, + eprint = {2309.14817}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2309.14817}, + url = {http://arxiv.org/abs/2309.14817}, + urldate = {2023-10-04}, + abstract = {We show that dynamics of the magnetization in ferromagnets can pump the orbital angular momentum, which we denote by orbital pumping. This is the reciprocal phenomenon to the orbital torque that induces magnetization dynamics by the orbital angular momentum in non-equilibrium. The orbital pumping is analogous to the spin pumping established in spintronics but requires the spin-orbit coupling for the orbital angular momentum to interact with the magnetization. 
We develop a formalism that describes the generation of the orbital angular momentum by magnetization dynamics within the adiabatic perturbation theory. Based on this, we perform first-principles calculation of the orbital pumping in prototypical \$3d\$ ferromagnets, Fe, Co, and Ni. The results show that the ratio between the orbital pumping and the spin pumping ranges from 5 to 15 percents, being smallest in Fe and largest in Ni. This implies that ferromagnetic Ni is a good candidate for measuring the orbital pumping. Implications of our results on experiments are also discussed.}, + pubstate = {preprint}, + keywords = {Ferromagnetism,FZJ,magnetization,orbital angular momentum,PGI,PGI-1/IAS-1,physics,pumping,quantum materials,SOC,transition metals}, + file = {/Users/wasmer/Nextcloud/Zotero/Go et al_2023_Orbital Pumping by Magnetization Dynamics in Ferromagnets.pdf;/Users/wasmer/Zotero/storage/EHVUDCNY/2309.html} +} + @book{gorelickHighPerformancePython2020, title = {High Performance {{Python}}: Practical Performance Programming for Humans}, shorttitle = {High Performance {{Python}}}, @@ -4357,6 +5616,36 @@ file = {/Users/wasmer/Nextcloud/Zotero/Goscinski et al_2021_The role of feature space in atomistic learning.pdf} } +@online{goscinskiSuiteGeneralisableMachine2023, + title = {A {{Suite}} of {{Generalisable Machine Learning Methods Born}} out of {{Chemistry}} and {{Materials Science}} [Version 2; Peer Review: 1 Approved, 1 Approved with Reservations]}, + author = {Goscinski, Alexander and Principe, Victor P. and Fraux, Guillaume and Kliavinek, Sergei and Helfrecht, Benjamin A. 
and Loche, Philip and Ceriotti, Michele and Cersonsky, Rose K.}, + date = {2023-09-18}, + eprinttype = {Open Research Europe}, + doi = {10.12688/openreseurope.15789.2}, + url = {https://open-research-europe.ec.europa.eu/articles/3-81/v2}, + urldate = {2023-10-01}, + abstract = {Easy-to-use libraries such as scikit-learn have accelerated the adoption and application of machine learning (ML) workflows and data-driven methods. While many of the algorithms implemented in these libraries originated in specific scientific fields, they have gained in popularity in part because of their generalisability across multiple domains. Over the past two decades, researchers in the chemical and materials science community have put forward general-purpose machine learning methods. The deployment of these methods into workflows of other domains, however, is often burdensome due to the entanglement with domain-specific functionalities. We present the python library scikit-matter that targets domain-agnostic implementations of methods developed in the computational chemical and materials science community, following the scikit-learn API and coding guidelines to promote usability and interoperability with existing workflows.}, + pubstate = {preprint}, + keywords = {\_tablet,AML,convex hull,CUR decomposition,descriptors,dimensionality reduction,error estimate,farthest point sampling,feature reconstuction measure,feature selection,kernel methods,library,ML,PCovR,regression,sample selection,scikit-learn,SOAP,unsupervised learning,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Goscinski et al_2023_A Suite of Generalisable Machine Learning Methods Born out of Chemistry and.pdf;/Users/wasmer/Zotero/storage/IAJ8UA3M/v2.html} +} + +@online{grassanoHighthroughputScreeningWeyl2023, + title = {High-Throughput Screening of {{Weyl}} Semimetals}, + author = {Grassano, Davide and Marzari, Nicola and Campi, Davide}, + date = {2023-08-04}, + eprint = {2308.01663}, + eprinttype = {arxiv}, 
+ eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2308.01663}, + url = {http://arxiv.org/abs/2308.01663}, + urldate = {2023-10-08}, + abstract = {Topological Weyl semimetals represent a novel class of non-trivial materials, where band crossings with linear dispersions take place at generic momenta across reciprocal space. These crossings give rise to low-energy properties akin to those of Weyl fermions, and are responsible for several exotic phenomena. Up to this day, only a handful of Weyl semimetals have been discovered, and the search for new ones remains a very active area. The main challenge on the computational side arises from the fact that many of the tools used to identify the topological class of a material do not provide a complete picture in the case of Weyl semimetals. In this work, we propose an alternative and inexpensive, criterion to screen for possible Weyl fermions, based on the analysis of the band structure along high-symmetry directions in the absence of spin-orbit coupling. We test the method by running a high-throughput screening on a set of 5455 inorganic bulk materials and identify 49 possible candidates for topological properties. A further analysis, carried out by identifying and characterizing the crossings in the Brillouin zone, shows us that 3 of these candidates are Weyl semimetals. 
Interestingly, while these 3 materials underwent other high-throughput screenings, none had revealed their topological behavior before.}, + pubstate = {preprint}, + keywords = {AiiDA,DFT,Hall effect,Hall QAHE,High-throughput,HTC,Materials Cloud,materials screening,PBE,physics,Quantum ESPRESSO,quantum materials,SOC,topological,Topological matter,TRS,Weyl semimetal,with-code,with-data,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Grassano et al_2023_High-throughput screening of Weyl semimetals.pdf;/Users/wasmer/Zotero/storage/QWHL9R54/2308.html} +} + @online{grisafiElectronicstructurePropertiesAtomcentered2022, title = {Electronic-Structure Properties from Atom-Centered Predictions of the Electron Density}, author = {Grisafi, Andrea and Lewis, Alan M. and Rossi, Mariana and Ceriotti, Michele}, @@ -4515,6 +5804,23 @@ file = {/home/johannes/Books/data_science/general_practice/Grus_DataScienceFromScratchPython_2e-2019.epub} } +@book{guidrySymmetryBrokenSymmetry2022, + title = {Symmetry, {{Broken Symmetry}}, and {{Topology}} in {{Modern Physics}}: {{A First Course}}}, + shorttitle = {Symmetry, {{Broken Symmetry}}, and {{Topology}} in {{Modern Physics}}}, + author = {Guidry, Mike and Sun, Yang}, + date = {2022-03-31}, + edition = {1}, + publisher = {{Cambridge University Press}}, + doi = {10.1017/9781009000949}, + url = {https://www.cambridge.org/highereducation/books/symmetry-broken-symmetry-and-topology-in-modern-physics/794C53F5AFDB06E0EE6F4310C8346DFD}, + urldate = {2023-09-30}, + abstract = {Written for use in teaching and for self-study, this book provides a comprehensive and pedagogical introduction to groups, algebras, geometry, and topology. It assimilates modern applications of these concepts, assuming only an advanced undergraduate preparation in physics. 
It provides a balanced view of group theory, Lie algebras, and topological concepts, while emphasizing a broad range of modern applications such as Lorentz and Poincaré invariance, coherent states, quantum phase transitions, the quantum Hall effect, topological matter, and Chern numbers, among many others. An example based approach is adopted from the outset, and the book includes worked examples and informational boxes to illustrate and expand on key concepts. 344 homework problems are included, with full solutions available to instructors, and a subset of 172 of these problems have full solutions available to students.}, + isbn = {978-1-00-900094-9}, + langid = {english}, + keywords = {/unread,educational,group theory,Hall effect,Hall QHE,learning material,physics,symmetry,symmetry breaking,textbook,topological,topological insulator,topological insulators,TRS}, + file = {/Users/wasmer/Zotero/storage/U9L2ZDUH/794C53F5AFDB06E0EE6F4310C8346DFD.html} +} + @inproceedings{gundersenStateArtReproducibility2018, title = {State of the {{Art}}: {{Reproducibility}} in {{Artificial Intelligence}}}, shorttitle = {State of the {{Art}}}, @@ -4545,6 +5851,40 @@ file = {/Users/wasmer/Nextcloud/Zotero/Gutmann_2022_Pen and Paper Exercises in Machine Learning.pdf;/Users/wasmer/Zotero/storage/KMSFX6RY/2206.html} } +@article{gyorffyCoherentPotentialApproximationNonoverlappingMuffinTinPotential1972, + title = {Coherent-{{Potential Approximation}} for a {{Nonoverlapping-Muffin-Tin-Potential Model}} of {{Random Substitutional Alloys}}}, + author = {Gyorffy, B. L.}, + date = {1972-03-15}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. 
B},
+  volume = {5},
+  number = {6},
+  pages = {2382--2384},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevB.5.2382},
+  url = {https://link.aps.org/doi/10.1103/PhysRevB.5.2382},
+  urldate = {2023-09-19},
+  abstract = {Soven's method of doing the coherent-potential approximation for a nonoverlapping-muffin-tin-potential model of random substitutional alloys has been rederived without the use of an energy-dependent model potential. As a consequence of the present approach, a much simplified expression for the density of states is obtained.},
+  file = {/Users/wasmer/Nextcloud/Zotero/Gyorffy_1972_Coherent-Potential Approximation for a Nonoverlapping-Muffin-Tin-Potential.pdf}
+}
+
+@article{h.borgQuantifyingPerformanceMachine2023,
+  title = {Quantifying the performance of machine learning models in materials discovery},
+  author = {Borg, Christopher K. H. and Muckley, Eric S. and Nyby, Clara and Saal, James E. and Ward, Logan and Mehta, Apurva and Meredig, Bryce},
+  date = {2023},
+  journaltitle = {Digital Discovery},
+  volume = {2},
+  number = {2},
+  pages = {327--338},
+  publisher = {{Royal Society of Chemistry}},
+  doi = {10.1039/D2DD00113F},
+  url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d2dd00113f},
+  urldate = {2023-08-19},
+  langid = {english},
+  keywords = {/unread,Citrine Informatics,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/H. Borg et al_2023_Quantifying the performance of machine learning models in materials discovery.pdf}
+}
+
 @article{hafizHighthroughputDataAnalysis2018,
   title = {A high-throughput data analysis and materials discovery tool for strongly correlated materials},
   author = {Hafiz, Hasnain and Khair, Adnan Ibne and Choi, Hongchul and Mueen, Abdullah and Bansil, Arun and Eidenbenz, Stephan and Wills, John and Zhu, Jian-Xin and Balatsky, Alexander V. 
and Ahmed, Towfiq}, @@ -4584,6 +5924,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Hafner_2008_Ab-initio simulations of materials using VASP.pdf;/Users/wasmer/Zotero/storage/UCE26HNQ/jcc.html} } +@inproceedings{hammermeshGroupTheoryIts1963, + title = {\emph{Group }{{\emph{Theory}}}\emph{ and }{{\emph{Its Application}}}\emph{ to }{{\emph{Physical Problems}}}}, + booktitle = {Physics {{Today}}}, + author = {Hammermesh, Morton and Flammer, Carson}, + date = {1963-02-01}, + volume = {16}, + number = {2}, + pages = {62--64}, + issn = {0031-9228, 1945-0699}, + doi = {10.1063/1.3050758}, + url = {https://pubs.aip.org/physicstoday/article/16/2/62/423155/Group-Theory-and-Its-Application-to-Physical}, + urldate = {2023-09-20}, + abstract = {Semantic Scholar extracted view of "Group theory and its application to physical problems" by M. Hamermesh}, + langid = {english}, + keywords = {/unread,group theory,textbook} +} + @article{handleyNextGenerationInteratomic2014, title = {Next Generation Interatomic Potentials for Condensed Systems}, author = {Handley, Christopher Michael and Behler, Jörg}, @@ -4603,6 +5960,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Handley_Behler_2014_Next generation interatomic potentials for condensed systems.pdf} } +@online{hansonStrainScientificPublishing2023, + title = {The Strain on Scientific Publishing}, + author = {Hanson, Mark A. and Barreiro, Pablo Gómez and Crosetto, Paolo and Brockington, Dan}, + date = {2023-09-27}, + eprint = {2309.15884}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2309.15884}, + url = {http://arxiv.org/abs/2309.15884}, + urldate = {2023-10-02}, + abstract = {Scientists are increasingly overwhelmed by the volume of articles being published. Total articles indexed in Scopus and Web of Science have grown exponentially in recent years; in 2022 the article total was 47\% higher than in 2016, which has outpaced the limited growth, if any, in the number of practising scientists. 
Thus, publication workload per scientist (writing, reviewing, editing) has increased dramatically. We define this problem as the strain on scientific publishing. To analyse this strain, we present five data-driven metrics showing publisher growth, processing times, and citation behaviours. We draw these data from web scrapes, requests for data from publishers, and material that is freely available through publisher websites. Our findings are based on millions of papers produced by leading academic publishers. We find specific groups have disproportionately grown in their articles published per year, contributing to this strain. Some publishers enabled this growth by adopting a strategy of hosting special issues, which publish articles with reduced turnaround times. Given pressures on researchers to publish or perish to be competitive for funding applications, this strain was likely amplified by these offers to publish more articles. We also observed widespread year-over-year inflation of journal impact factors coinciding with this strain, which risks confusing quality signals. Such exponential growth cannot be sustained. 
The metrics we define here should enable this evolving conversation to reach actionable solutions to address the strain on scientific publishing.}, + pubstate = {preprint}, + keywords = {/unread,academia,criticism,literature analysis,publishing,scientific journals,working in science}, + file = {/Users/wasmer/Nextcloud/Zotero/Hanson et al_2023_The strain on scientific publishing.pdf;/Users/wasmer/Zotero/storage/YH795P59/2309.html} +} + @article{hartlNationaleForschungsdateninfrastrukturNFDI2021, title = {Nationale Forschungsdateninfrastruktur (NFDI)}, author = {Hartl, Nathalie and Wössner, Elena and Sure-Vetter, York}, @@ -4656,7 +6029,7 @@ url = {https://link.aps.org/doi/10.1103/RevModPhys.82.3045}, urldate = {2023-06-15}, abstract = {Topological insulators are electronic materials that have a bulk band gap like an ordinary insulator but have protected conducting states on their edge or surface. These states are possible due to the combination of spin-orbit interactions and time-reversal symmetry. The two-dimensional (2D) topological insulator is a quantum spin Hall insulator, which is a close cousin of the integer quantum Hall state. A three-dimensional (3D) topological insulator supports novel spin-polarized 2D Dirac fermions on its surface. In this Colloquium the theoretical foundation for topological insulators and superconductors is reviewed and recent experiments are described in which the signatures of topological insulators have been observed. Transport experiments on HgTe∕CdTe quantum wells are described that demonstrate the existence of the edge states predicted for the quantum spin Hall insulator. Experiments on Bi1−xSbx, Bi2Se3, Bi2Te3, and Sb2Te3 are then discussed that establish these materials as 3D topological insulators and directly probe the topology of their surface states. Exotic states are described that can occur at the surface of a 3D topological insulator due to an induced energy gap. 
A magnetic gap leads to a novel quantum Hall state that gives rise to a topological magnetoelectric effect. A superconducting energy gap leads to a state that supports Majorana fermions and may provide a new venue for realizing proposals for topological quantum computation. Prospects for observing these exotic states are also discussed, as well as other potential device applications of topological insulators.}, - keywords = {\_tablet}, + keywords = {\_tablet,2D,3D,BdG,Bi2Te3,bismuth selenide,bismuth telluride,Chern number,colloquium,graphene,groundbreaking,Hall effect,Hall QAHE,Hall QSHE,Majorana,MZM,superconductor,TKNN,topological,topological insulator,TRS}, file = {/Users/wasmer/Nextcloud/Zotero/Hasan_Kane_2010_Colloquium.pdf;/Users/wasmer/Zotero/storage/RXPD79NW/RevModPhys.82.html} } @@ -4698,6 +6071,42 @@ file = {/Users/wasmer/Nextcloud/Zotero/Hegde_Bowen_2017_Machine-learned approximations to Density Functional Theory Hamiltonians.pdf} } +@online{hegdeQuantifyingUncertaintyHighthroughput2022, + title = {Quantifying Uncertainty in High-Throughput Density Functional Theory: A Comparison of {{AFLOW}}, {{Materials Project}}, and {{OQMD}}}, + shorttitle = {Quantifying Uncertainty in High-Throughput Density Functional Theory}, + author = {Hegde, Vinay I. and Borg, Christopher K. H. and family=Rosario, given=Zachary, prefix=del, useprefix=true and Kim, Yoolhee and Hutchinson, Maxwell and Antono, Erin and Ling, Julia and Saxe, Paul and Saal, James E. and Meredig, Bryce}, + date = {2022-11-05}, + eprint = {2007.01988}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2007.01988}, + url = {http://arxiv.org/abs/2007.01988}, + urldate = {2023-08-19}, + abstract = {A central challenge in high throughput density functional theory (HT-DFT) calculations is selecting a combination of input parameters and post-processing techniques that can be used across all materials classes, while also managing accuracy-cost tradeoffs. 
To investigate the effects of these parameter choices, we consolidate three large HT-DFT databases: Automatic-FLOW (AFLOW), the Materials Project (MP), and the Open Quantum Materials Database (OQMD), and compare reported properties across each pair of databases for materials calculated using the same initial crystal structure. We find that HT-DFT formation energies and volumes are generally more reproducible than band gaps and total magnetizations; for instance, a notable fraction of records disagree on whether a material is metallic (up to 7\%) or magnetic (up to 15\%). The variance between calculated properties is as high as 0.105 eV/atom (median relative absolute difference, or MRAD, of 6\%) for formation energy, 0.65 \{\textbackslash AA\}\$\^3\$/atom (MRAD of 4\%) for volume, 0.21 eV (MRAD of 9\%) for band gap, and 0.15 \$\textbackslash mu\_\{\textbackslash rm B\}\$/formula unit (MRAD of 8\%) for total magnetization, comparable to the differences between DFT and experiment. We trace some of the larger discrepancies to choices involving pseudopotentials, the DFT+U formalism, and elemental reference states, and argue that further standardization of HT-DFT would be beneficial to reproducibility.}, + pubstate = {preprint}, + keywords = {AFLOWLIB,Citrine Informatics,DFT,magnetization,materials database,materials project,OQMD,reproducibility,todo-tagging,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Hegde et al_2022_Quantifying uncertainty in high-throughput density functional theory.pdf;/Users/wasmer/Zotero/storage/CCU4ZPKZ/2007.html} +} + +@article{hegdeQuantifyingUncertaintyHighthroughput2023, + title = {Quantifying Uncertainty in High-Throughput Density Functional Theory: {{A}} Comparison of {{AFLOW}}, {{Materials Project}}, and {{OQMD}}}, + shorttitle = {Quantifying Uncertainty in High-Throughput Density Functional Theory}, + author = {Hegde, Vinay I. and Borg, Christopher K. H. 
and family=Rosario, given=Zachary, prefix=del, useprefix=true and Kim, Yoolhee and Hutchinson, Maxwell and Antono, Erin and Ling, Julia and Saxe, Paul and Saal, James E. and Meredig, Bryce},
+  date = {2023-05-30},
+  journaltitle = {Physical Review Materials},
+  shortjournal = {Phys. Rev. Mater.},
+  volume = {7},
+  number = {5},
+  pages = {053805},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevMaterials.7.053805},
+  url = {https://link.aps.org/doi/10.1103/PhysRevMaterials.7.053805},
+  urldate = {2023-10-08},
+  abstract = {A central challenge in high-throughput density functional theory (HT-DFT) calculations is selecting a combination of input parameters and postprocessing techniques that can be used across all materials classes, while also managing accuracy-cost tradeoffs. To investigate the effects of these parameter choices, we consolidate three large HT-DFT databases: Automatic-FLOW (AFLOW), the Materials Project (MP), and the Open Quantum Materials Database (OQMD), and compare reported properties across each pair of databases for materials calculated using the same initial crystal structure. We find that HT-DFT formation energies and volumes are generally more reproducible than band gaps and total magnetizations; for instance, a notable fraction of records disagree on whether a material is metallic (up to 7\%) or magnetic (up to 15\%). The variance between calculated properties is as high as 0.105 eV/atom (median relative absolute difference, or MRAD, of 6\%) for formation energy, 0.65 Å3/atom (MRAD of 4\%) for volume, 0.21 eV (MRAD of 9\%) for band gap, and 0.15μB/formula unit (MRAD of 8\%) for total magnetization, comparable to the differences between DFT and experiment. 
We trace some of the larger discrepancies to choices involving pseudopotentials, the DFT+U formalism, and elemental reference states, and argue that further standardization of HT-DFT would be beneficial to reproducibility.}, + keywords = {AFLOW,Database,DFT,HT-DFT,HTC,materials database,materials informatics,materials project,OQMD,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Hegde et al_2023_Quantifying uncertainty in high-throughput density functional theory.pdf;/Users/wasmer/Zotero/storage/Q2RHZQ8Q/PhysRevMaterials.7.html} +} + @article{helfrechtStructurepropertyMapsKernel2020, title = {Structure-Property Maps with {{Kernel}} Principal Covariates Regression}, author = {Helfrecht, Benjamin A. and Cersonsky, Rose K. and Fraux, Guillaume and Ceriotti, Michele}, @@ -4794,9 +6203,10 @@ file = {/Users/wasmer/Nextcloud/Zotero/Herbst_Levitt_2021_Black-box inhomogeneous preconditioning for self-consistent field iterations in.pdf;/Users/wasmer/Zotero/storage/66YFJEHI/2009.html} } -@online{herbstCECAMErrorControl, +@online{herbstCECAMErrorControl2022, title = {{{CECAM}} - {{Error}} Control in First-Principles {{modellingError}} Control in First-Principles Modelling}, author = {Herbst, Michael F. and Csányi, Gábor and Dusson, Genevieve and Marzouk, Youssef}, + date = {2022-06-20}, url = {https://www.cecam.org/workshop-details/1115}, urldate = {2023-05-31}, keywords = {/unread,active learning,AML,Bayesian methods,DFT,error estimate,event,ML,MLP,uncertainty quantification,workshop}, @@ -4929,6 +6339,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Herr et al_2019_Compressing physics with an autoencoder.pdf} } +@article{herrCompressingPhysicsAutoencoder2019a, + title = {Compressing Physics with an Autoencoder: {{Creating}} an Atomic Species Representation to Improve Machine Learning Models in the Chemical Sciences}, + shorttitle = {Compressing Physics with an Autoencoder}, + author = {Herr, John E. 
and Koh, Kevin and Yao, Kun and Parkhill, John}, + date = {2019-08-22}, + journaltitle = {The Journal of Chemical Physics}, + shortjournal = {The Journal of Chemical Physics}, + volume = {151}, + number = {8}, + pages = {084103}, + issn = {0021-9606}, + doi = {10.1063/1.5108803}, + url = {https://doi.org/10.1063/1.5108803}, + urldate = {2023-08-24}, + abstract = {We define a vector quantity which corresponds to atomic species identity by compressing a set of physical properties with an autoencoder. This vector, referred to here as the elemental modes, provides many advantages in downstream machine learning tasks. Using the elemental modes directly as the feature vector, we trained a neural network to predict formation energies of elpasolites with improved accuracy over previous works on the same task. Combining the elemental modes with geometric features used in high-dimensional neural network potentials (HD-NNPs) solves many problems of scaling and efficiency in the development of such neural network potentials. Whereas similar models in the past have been limited to typically four atomic species (H, C, N, and O), our implementation does not scale in cost by adding more atomic species and allows us to train an HD-NNP model which treats molecules containing H, C, N, O, F, P, S, Cl, Se, Br, and I. 
Finally, we establish that our implementation allows us to define feature vectors for alchemical intermediate states in the HD-NNP model, which opens up new possibilities for performing alchemical free energy calculations on systems where bond breaking/forming is important.}, + keywords = {/unread,AML,autoencoder,descriptors,dimensionality reduction,library,ML,prediction of formation energy,representation learning,todo-tagging,VAE}, + file = {/Users/wasmer/Nextcloud/Zotero/Herr et al_2019_Compressing physics with an autoencoder2.pdf;/Users/wasmer/Zotero/storage/9G25EF2D/315342.html} +} + @article{heTopologicalInsulatorSpintronics2019, title = {Topological Insulator: {{Spintronics}} and Quantum Computations}, shorttitle = {Topological Insulator}, @@ -5023,6 +6452,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Hirohata et al_2020_Review on spintronics.pdf} } +@inproceedings{hoangMachineLearningPotential2023, + title = {Machine {{Learning Potential Function Generation}} for Ab Initio {{Electronic Structure Calculations}}}, + booktitle = {Practice and {{Experience}} in {{Advanced Research Computing}}}, + author = {Hoang, Duong Thuy and Rulis, Paul}, + date = {2023-09-10}, + series = {{{PEARC}} '23}, + pages = {448--449}, + publisher = {{Association for Computing Machinery}}, + location = {{New York, NY, USA}}, + doi = {10.1145/3569951.3597581}, + url = {https://dl.acm.org/doi/10.1145/3569951.3597581}, + urldate = {2023-09-21}, + abstract = {First-principles electronic structure calculations based on density functional theory (DFT) are well-known to have a high computational cost that scales algorithmically as O(N3), where N is the number of electrons. Reducing that cost is a key goal of the computational materials physics community and machine learning (ML) is viewed as an essential tool for that task. However, ML model training requires an appropriate match between the input descriptors and the target property as well as copious quantities of training data. 
Therefore, we present a computer program that is designed to automate the generation of local atomic environment descriptors for single element systems that may be used for training neural networks to predict electronic potential function coefficients, \{Ai\}, which are used within the DFT based orthogonalized linear combination of atomic orbitals (OLCAO) method [2]. In our approach, the total electronic potential function of a periodic crystal, , is expressed as a sum of localized atom-centered Gaussian functions. Each Gaussian function, i, in the set of all Gaussian functions has a fixed αi coefficient. The set of \{Ai\} coefficients are updated in each cycle of the self-consistent field (SCF) iterations in accordance with the charge density that was computed in the previous SCF step. However, if the choice of coefficients \{Ai\} can be accurately predicted for a given system, then the SCF process can be skipped entirely, satisfying an important requirement of our goal to reduce the computational cost. The prediction method uses suitable neural networks (NNs) where the input values are a set of local atomic environment descriptors and the output values are the \{Ai\} coefficients for a targeted system. The descriptors we opted to use are the bispectrum components but other additional descriptors may be incorporated. Bispectrum components are geometric calculations that smoothly capture subtle variations in the local atomic environment and that are invariant under translation, rotation, and permutation of neighborhood atoms. The bispectrum components can also easily incorporate different types and numbers of elements, and they have been used by others for a similar purpose [3, 4]. Those requirements are difficult to achieve using other methods such as a list of bond angles and bond lengths toward nearest neighbor atoms while maintaining a fixed number of NN input features. 
Here the expansion coefficients appear together with the coupling coefficient for four-dimensional spherical harmonics, which is analogous to the Clebsch-Gordan coefficients for rotations in three dimensional space. One challenge in this research is defining a suitable cut-off radius for evaluation of the bispectrum component to avoid neglecting the interaction between a targeted atom and its neighbors. The cut-off radius is weighted as a function of the elements involved to accommodate different types of bonding (e.g., ionic, covalent, metallic). Additionally, for properly defining and training a neural network (see below), it is vital that we provide a clear correlation between the physical (geometric) features of the bispectrum components and the electronic features that may simultaneously be present to avoid too much redundancy in the input data. This lack of understanding can limit the development of methods to predict the electronic structure properties based on the bispectrum components, underscoring the need for further research in this area. A supervised training framework for a proposed neural network is demonstrated using a data set of pure Si models that includes amorphous Si, crystalline Si, Si with a passive defect, and Si with self-interstitials. Other models will be implemented to compare efficiency. For each model, the input/target output training pairs consist of local environment descriptors - bispectrum components (input) that encode the structure of neighboring atoms relative to the central atom i at a specific point in real space, along with the converged potential functions obtained by the SCF process (target output). The data set must be partitioned into training, test, and validation sets for use in subsequent iterations of training and validation to evaluate and optimize the model’s performance during the training process. In OLCAO, the total electronic potential function of a crystal is expressed as a sum of atom-centered potential functions.
Each atom-centered potential function is represented as a sum of Gaussian functions. However, it is vital to recognize that although the potential function is an assembly of site-centered functions it cannot be said that the potential function from a given site is the potential function "of" the atom at that site. Rather, the potential function at a given site is determined by the influence of all nearby atoms. Therefore, it is intuitive to seek a ML model that follows a similar structure. In this case, it is important to find a way for the input data structure to incorporate that feature of the potential function, which consists of a mixture of influences derived from the neighboring atoms. Each component of this mixture represents a cluster or subpopulation within the local region. To capture this structure, we propose a neural network framework based on Mixture Density Network (MDN) [1] for the training process. This approach involves encoding the local, medium-range, and long-range (global) influences for each atom. In many cases, electron interactions are considered ’short-sighted,’ meaning that they are mainly affected by nearby atoms only. However, our proposed method overcomes this limitation and effectively addresses novel long-range electronic structure properties such as those found in metallic or certain magnetic materials. Results regarding the optimization of the run time for calculating the bispectrum component is discussed, including a comparison with key function program code that uses third-party libraries such as SymPy. A computer program is developed to automatically generate bispectrum components for a single-element system in a periodic unit cell. We investigated the symmetric properties of the bispectrum components, which align with the proof established in [4]. However, further development and testing of the program are necessary before it can be applied to multiple-element systems. 
Overall, this research contributes to the ongoing effort to develop new and improved neural network frameworks for predicting the electronic structure properties of materials with desirable features. When combined with other unique aspects of the OLCAO method it is expected that this approach will enable us to overcome the O(N3) algorithmic complexity scaling problem and thereby address multi-scale physics problems that require both direct access to the electronic wave function and a large number of atoms to realistically model.}, + isbn = {978-1-4503-9985-2}, + keywords = {ACDC,AML,bispectrum,defects,descriptors,disordered,LCAO,long-range interaction,materials,mixture density network,ML,ML-DFT,ML-ESM,prediction of electron potential,silicon,single-element,uncertainty quantification}, + file = {/Users/wasmer/Zotero/storage/7MSTHV87/Hoang and Rulis - 2023 - Machine Learning Potential Function Generation for.pdf} +} + @online{hodappEquivariantTensorNetworks2023, title = {Equivariant {{Tensor Networks}}}, author = {Hodapp, Max and Shapeev, Alexander}, @@ -5153,6 +6600,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Honrao et al_2020_Augmenting machine learning of energy landscapes with local structural.pdf;/Users/wasmer/Zotero/storage/VFKSDW8H/Augmenting-machine-learning-of-energy-landscapes.html} } +@online{hoogeboomEquivariantDiffusionMolecule2022, + title = {Equivariant {{Diffusion}} for {{Molecule Generation}} in {{3D}}}, + author = {Hoogeboom, Emiel and Satorras, Victor Garcia and Vignac, Clément and Welling, Max}, + date = {2022-06-16}, + eprint = {2203.17003}, + eprinttype = {arxiv}, + eprintclass = {cs, q-bio, stat}, + doi = {10.48550/arXiv.2203.17003}, + url = {http://arxiv.org/abs/2203.17003}, + urldate = {2023-08-22}, + abstract = {This work introduces a diffusion model for molecule generation in 3D that is equivariant to Euclidean transformations. 
Our E(3) Equivariant Diffusion Model (EDM) learns to denoise a diffusion process with an equivariant network that jointly operates on both continuous (atom coordinates) and categorical features (atom types). In addition, we provide a probabilistic analysis which admits likelihood computation of molecules using our model. Experimentally, the proposed method significantly outperforms previous 3D molecular generative methods regarding the quality of generated samples and efficiency at training time.}, + pubstate = {preprint}, + version = {2}, + keywords = {diffusion model,generative models,todo-tagging,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Hoogeboom et al_2022_Equivariant Diffusion for Molecule Generation in 3D.pdf;/Users/wasmer/Zotero/storage/K6KWYTSV/2203.html} +} + @article{huAisNetUniversalInteratomic2023, title = {{{AisNet}}: {{A Universal Interatomic Potential Neural Network}} with {{Encoded Local Environment Features}}}, shorttitle = {{{AisNet}}}, @@ -5172,6 +6636,23 @@ keywords = {/unread,AML,ML,MLP,universal potential} } +@article{huangCentralRoleDensity2023, + title = {The Central Role of Density Functional Theory in the {{AI}} Age}, + author = {Huang, Bing and family=Rudorff, given=Guido Falk, prefix=von, useprefix=true and family=Lilienfeld, given=O. Anatole, prefix=von, useprefix=true}, + date = {2023-07-14}, + journaltitle = {Science}, + volume = {381}, + number = {6654}, + pages = {170--175}, + publisher = {{American Association for the Advancement of Science}}, + doi = {10.1126/science.abn3445}, + url = {https://www.science.org/doi/10.1126/science.abn3445}, + urldate = {2023-07-14}, + abstract = {Density functional theory (DFT) plays a pivotal role in chemical and materials science because of its relatively high predictive power, applicability, versatility, and computational efficiency. 
We review recent progress in machine learning (ML) model developments, which have relied heavily on DFT for synthetic data generation and for the design of model architectures. The general relevance of these developments is placed in a broader context for chemical and materials sciences. DFT-based ML models have reached high efficiency, accuracy, scalability, and transferability and pave the way to the routine use of successful experimental planning software within self-driving laboratories.}, + keywords = {AML,digital twin,ML,ML-DFT,ML-ESM,review,review-of-AML,review-of-ML-DFT}, + file = {/Users/wasmer/Nextcloud/Zotero/Huang et al_2023_The central role of density functional theory in the AI age.pdf} +} + @article{huangEmergingTopologicalStates2017, title = {Emerging Topological States in Quasi-Two-Dimensional Materials}, author = {Huang, Huaqing and Xu, Yong and Wang, Jianfeng and Duan, Wenhui}, @@ -5186,6 +6667,7 @@ urldate = {2023-06-15}, abstract = {Inspired by the discovery of graphene, various two-dimensional (2D) materials have been experimentally realized, which exhibit novel physical properties and support promising applications. Exotic topological states in 2D materials (including quantum spin Hall and quantum anomalous Hall insulators), which are characterized by nontrivial metallic edge states within the insulating bulk gap, have attracted considerable attentions in the past decade due to their great importance for fundamental research and practical applications. They also create a surge of research activities and attract extensive efforts to search for new topological materials in realistic 2D/quasi-2D systems. This review presents a comprehensive survey of recent progress in designing of topological states in quasi-2D materials, including various quantum well heterostructures and 2D atomic lattice structures. 
In particular, the possibilities of constructing topological nontrivial states from commonly used materials are discussed and the ways of enlarging energy gaps of topological states and realizing different topological states in a single material are presented. WIREs Comput Mol Sci 2017, 7:e1296. doi: 10.1002/wcms.1296 This article is categorized under: Structure and Mechanism {$>$} Computational Materials Science}, langid = {english}, + keywords = {2D material,band inversion,Bi2Te3,Chern number,graphene,Hall effect,Hall QAHE,Hall QSHE,heterostructures,materials,perovskites,physics,review,topolog,topological,topological insulator,topological invariant,TRS}, file = {/Users/wasmer/Nextcloud/Zotero/Huang et al_2017_Emerging topological states in quasi-two-dimensional materials.pdf;/Users/wasmer/Zotero/storage/R26FLUTA/wcms.html} } @@ -5206,7 +6688,7 @@ abstract = {Modification of physical properties of materials and design of materials with on-demand characteristics is at the heart of modern technology. Rare application relies on pure materials—most devices and technologies require careful design of materials properties through alloying, creating heterostructures of composites, or controllable introduction of defects. At the same time, such designer materials are notoriously difficult to model. Thus, it is very tempting to apply machine learning methods to such systems. Unfortunately, there is only a handful of machine learning-friendly material databases available these days. We develop a platform for easy implementation of machine learning techniques to materials design and populate it with datasets on pristine and defected materials. Here we introduce the 2D Material Defect (2DMD) datasets that include defect properties of represented 2D materials such as MoS2, WSe2, hBN, GaSe, InSe, and black phosphorous, calculated using DFT. 
Our study provides a data-driven physical understanding of complex behaviors of defect properties in 2D materials, holding promise for a guide to the development of efficient machine learning models. In addition, with the increasing enrollment of datasets, our database could provide a platform for designing materials with predetermined properties.}, issue = {1}, langid = {english}, - keywords = {2D material,2DMD dataset,AML,Database,database generation,defect descriptor,defect screening,defects,disordered,high-density defects,High-throughput,HTC,materials screening,ML,MoS2,PBE,physics,point defects,sampling,spin-polarized,TMDC,vacancies,VASP,with-data}, + keywords = {\_tablet,2D material,2DMD dataset,AML,Database,database generation,defect descriptor,defect screening,defects,disordered,high-density defects,High-throughput,HTC,materials screening,ML,MoS2,PBE,physics,point defects,sampling,spin-polarized,TMDC,vacancies,VASP,with-data}, file = {/Users/wasmer/Nextcloud/Zotero/Huang et al_2023_Unveiling the complex structure-property correlation of defects in 2D materials.pdf} } @@ -5263,11 +6745,61 @@ abstract = {The prediction of material properties based on density-functional theory has become routinely common, thanks, in part, to the steady increase in the number and robustness of available simulation packages. This plurality of codes and methods is both a boon and a burden. While providing great opportunities for cross-verification, these packages adopt different methods, algorithms, and paradigms, making it challenging to choose, master, and efficiently use them. We demonstrate how developing common interfaces for workflows that automatically compute material properties greatly simplifies interoperability and cross-verification. We introduce design rules for reusable, code-agnostic, workflow interfaces to compute well-defined material properties, which we implement for eleven quantum engines and use to compute various material properties. 
Each implementation encodes carefully selected simulation parameters and workflow logic, making the implementer’s expertise of the quantum engine directly available to non-experts. All workflows are made available as open-source and full reproducibility of the workflows is guaranteed through the use of the AiiDA infrastructure.}, issue = {1}, langid = {english}, - keywords = {AiiDA,AiiDA-FLEUR,DFT,FAIR,FLEUR,provenance,workflows}, + keywords = {AiiDA,AiiDA-FLEUR,DFT,FAIR,FLEUR,FZJ,PGI,PGI-1/IAS-1,provenance,workflows}, file = {/Users/wasmer/Nextcloud/Zotero/Huber et al_2021_Common workflows for computing material properties using different quantum3.pdf} } -@unpublished{huoUnifiedRepresentationMolecules2018, +@online{huguenin-dumittanPhysicsinspiredEquivariantDescriptors2023, + title = {Physics-Inspired {{Equivariant Descriptors}} of {{Non-bonded Interactions}}}, + author = {Huguenin-Dumittan, Kevin K. and Loche, Philip and Haoran, Ni and Ceriotti, Michele}, + date = {2023-08-25}, + eprint = {2308.13208}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2308.13208}, + url = {http://arxiv.org/abs/2308.13208}, + urldate = {2023-08-29}, + abstract = {Most of the existing machine-learning schemes applied to atomic-scale simulations rely on a local description of the geometry of a structure, and struggle to model effects that are driven by long-range physical interactions. Efforts to overcome these limitations have focused on the direct incorporation of electrostatics, which is the most prominent effect, often relying on architectures that mirror the functional form of explicit physical models. Including other forms of non-bonded interactions, or predicting properties other than the interatomic potential, requires ad hoc modifications. 
We propose an alternative approach that extends the long-distance equivariant (LODE) framework to generate local descriptors of an atomic environment that resemble non-bonded potentials with arbitrary asymptotic behaviors, ranging from point-charge electrostatics to dispersion forces. We show that the LODE formalism is amenable to a direct physical interpretation in terms of a generalized multipole expansion, that simplifies its implementation and reduces the number of descriptors needed to capture a given asymptotic behavior. These generalized LODE features provide improved extrapolation capabilities when trained on structures dominated by a given asymptotic behavior, but do not help in capturing the wildly different energy scales that are relevant for a more heterogeneous data set. This approach provides a practical scheme to incorporate different types of non-bonded interactions, and a framework to investigate the interplay of physical and data-related considerations that underlie this challenging modeling problem.}, + pubstate = {preprint}, + keywords = {/unread,AML,descriptors,electrostatic interaction,library,LODE,long-range interaction,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Huguenin-Dumittan et al_2023_Physics-inspired Equivariant Descriptors of Non-bonded Interactions.pdf;/Users/wasmer/Zotero/storage/H7WQMUXS/2308.html} +} + +@online{huOGBLSCLargeScaleChallenge2021, + title = {{{OGB-LSC}}: {{A Large-Scale Challenge}} for {{Machine Learning}} on {{Graphs}}}, + shorttitle = {{{OGB-LSC}}}, + author = {Hu, Weihua and Fey, Matthias and Ren, Hongyu and Nakata, Maho and Dong, Yuxiao and Leskovec, Jure}, + date = {2021-10-20}, + eprint = {2103.09430}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2103.09430}, + url = {http://arxiv.org/abs/2103.09430}, + urldate = {2023-07-24}, + abstract = {Enabling effective and efficient machine learning (ML) over large-scale graph data (e.g., graphs with billions of edges) can have a 
great impact on both industrial and scientific applications. However, existing efforts to advance large-scale graph ML have been largely limited by the lack of a suitable public benchmark. Here we present OGB Large-Scale Challenge (OGB-LSC), a collection of three real-world datasets for facilitating the advancements in large-scale graph ML. The OGB-LSC datasets are orders of magnitude larger than existing ones, covering three core graph learning tasks -- link prediction, graph regression, and node classification. Furthermore, we provide dedicated baseline experiments, scaling up expressive graph ML models to the massive datasets. We show that expressive models significantly outperform simple scalable baselines, indicating an opportunity for dedicated efforts to further improve graph ML at scale. Moreover, OGB-LSC datasets were deployed at ACM KDD Cup 2021 and attracted more than 500 team registrations globally, during which significant performance improvements were made by a variety of innovative techniques. We summarize the common techniques used by the winning solutions and highlight the current best practices in large-scale graph ML. Finally, we describe how we have updated the datasets after the KDD Cup to further facilitate research advances. 
The OGB-LSC datasets, baseline code, and all the information about the KDD Cup are available at https://ogb.stanford.edu/docs/lsc/ .}, + pubstate = {preprint}, + keywords = {/unread,benchmark dataset,benchmarking,Database,graph database,graph ML,OGB,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Hu et al_2021_OGB-LSC.pdf;/Users/wasmer/Zotero/storage/CCNGLUCV/2103.html} +} + +@online{huOpenGraphBenchmark2021, + title = {Open {{Graph Benchmark}}: {{Datasets}} for {{Machine Learning}} on {{Graphs}}}, + shorttitle = {Open {{Graph Benchmark}}}, + author = {Hu, Weihua and Fey, Matthias and Zitnik, Marinka and Dong, Yuxiao and Ren, Hongyu and Liu, Bowen and Catasta, Michele and Leskovec, Jure}, + date = {2021-02-24}, + eprint = {2005.00687}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.2005.00687}, + url = {http://arxiv.org/abs/2005.00687}, + urldate = {2023-07-24}, + abstract = {We present the Open Graph Benchmark (OGB), a diverse set of challenging and realistic benchmark datasets to facilitate scalable, robust, and reproducible graph machine learning (ML) research. OGB datasets are large-scale, encompass multiple important graph ML tasks, and cover a diverse range of domains, ranging from social and information networks to biological networks, molecular graphs, source code ASTs, and knowledge graphs. For each dataset, we provide a unified evaluation protocol using meaningful application-specific data splits and evaluation metrics. In addition to building the datasets, we also perform extensive benchmark experiments for each dataset. Our experiments suggest that OGB datasets present significant challenges of scalability to large-scale graphs and out-of-distribution generalization under realistic data splits, indicating fruitful opportunities for future research. Finally, OGB provides an automated end-to-end graph ML pipeline that simplifies and standardizes the process of graph data loading, experimental setup, and model evaluation. 
OGB will be regularly updated and welcomes inputs from the community. OGB datasets as well as data loaders, evaluation scripts, baseline code, and leaderboards are publicly available at https://ogb.stanford.edu .}, + pubstate = {preprint}, + keywords = {/unread,benchmark dataset,benchmarking,Database,graph database,graph ML,library,original publication,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Hu et al_2021_Open Graph Benchmark.pdf;/Users/wasmer/Zotero/storage/L795VLZX/2005.html} +} + +@unpublished{huoUnifiedRepresentationMolecules2018, title = {Unified {{Representation}} of {{Molecules}} and {{Crystals}} for {{Machine Learning}}}, author = {Huo, Haoyan and Rupp, Matthias}, date = {2018-01-02}, @@ -5281,6 +6813,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Huo_Rupp_2018_Unified Representation of Molecules and Crystals for Machine Learning.pdf;/Users/wasmer/Zotero/storage/EZJ986KS/1704.html} } +@online{huStrategiesPretrainingGraph2020, + title = {Strategies for {{Pre-training Graph Neural Networks}}}, + author = {Hu, Weihua and Liu, Bowen and Gomes, Joseph and Zitnik, Marinka and Liang, Percy and Pande, Vijay and Leskovec, Jure}, + date = {2020-02-18}, + eprint = {1905.12265}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.1905.12265}, + url = {http://arxiv.org/abs/1905.12265}, + urldate = {2023-09-25}, + abstract = {Many applications of machine learning require a model to make accurate pre-dictions on test examples that are distributionally different from training ones, while task-specific labels are scarce during training. An effective approach to this challenge is to pre-train a model on related tasks where data is abundant, and then fine-tune it on a downstream task of interest. While pre-training has been effective in many language and vision domains, it remains an open question how to effectively use pre-training on graph datasets. 
In this paper, we develop a new strategy and self-supervised methods for pre-training Graph Neural Networks (GNNs). The key to the success of our strategy is to pre-train an expressive GNN at the level of individual nodes as well as entire graphs so that the GNN can learn useful local and global representations simultaneously. We systematically study pre-training on multiple graph classification datasets. We find that naive strategies, which pre-train GNNs at the level of either entire graphs or individual nodes, give limited improvement and can even lead to negative transfer on many downstream tasks. In contrast, our strategy avoids negative transfer and improves generalization significantly across downstream tasks, leading up to 9.4\% absolute improvements in ROC-AUC over non-pre-trained models and achieving state-of-the-art performance for molecular property prediction and protein function prediction.}, + pubstate = {preprint}, + keywords = {/unread,AML,GNN,graph ML,ML,pretrained models,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Hu et al_2020_Strategies for Pre-training Graph Neural Networks.pdf;/Users/wasmer/Zotero/storage/C2HIQ9TR/1905.html} +} + +@online{hutchinsonOvercomingDataScarcity2017, + title = {Overcoming Data Scarcity with Transfer Learning}, + author = {Hutchinson, Maxwell L. and Antono, Erin and Gibbons, Brenna M. and Paradiso, Sean and Ling, Julia and Meredig, Bryce}, + date = {2017-11-02}, + eprint = {1711.05099}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, stat}, + doi = {10.48550/arXiv.1711.05099}, + url = {http://arxiv.org/abs/1711.05099}, + urldate = {2023-08-19}, + abstract = {Despite increasing focus on data publication and discovery in materials science and related fields, the global view of materials data is highly sparse. 
This sparsity encourages training models on the union of multiple datasets, but simple unions can prove problematic as (ostensibly) equivalent properties may be measured or computed differently depending on the data source. These hidden contextual differences introduce irreducible errors into analyses, fundamentally limiting their accuracy. Transfer learning, where information from one dataset is used to inform a model on another, can be an effective tool for bridging sparse data while preserving the contextual differences in the underlying measurements. Here, we describe and compare three techniques for transfer learning: multi-task, difference, and explicit latent variable architectures. We show that difference architectures are most accurate in the multi-fidelity case of mixed DFT and experimental band gaps, while multi-task most improves classification performance of color with band gaps. For activation energies of steps in NO reduction, the explicit latent variable method is not only the most accurate, but also enjoys cancellation of errors in functions that depend on multiple tasks. 
These results motivate the publication of high quality materials datasets that encode transferable information, independent of industrial or academic interest in the particular labels, and encourage further development and application of transfer learning methods to materials informatics problems.}, + pubstate = {preprint}, + keywords = {AML,chemical reaction,Citrine Informatics,DFT,difference learning,experimental data,materials,materials project,ML,multi-fidelity,multi-task learning,prediction of bandgap,random forest,small data,transfer learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Hutchinson et al_2017_Overcoming data scarcity with transfer learning.pdf;/Users/wasmer/Zotero/storage/6XFVNRE9/1711.html} +} + @article{hutsonArtificialIntelligenceFaces2018, title = {Artificial Intelligence Faces Reproducibility Crisis}, author = {Hutson, Matthew}, @@ -5365,6 +6929,59 @@ file = {/Users/wasmer/Nextcloud/Zotero/2022_Improving the efficiency of ab initio electronic-structure calculations by deep.pdf} } +@report{informaticsChallengesMachineLearning2021, + type = {White Paper}, + title = {Challenges in {{Machine Learning}} for {{Materials}} - {{AI White Paper}}}, + author = {Informatics, Citrine}, + date = {2021-04}, + institution = {{Citrine Informatics}}, + url = {https://citrine.io/success/white-papers/white-paper-challenges-in-machine-learning-for-materials/}, + urldate = {2023-08-19}, + abstract = {Learn about challenges in Machine Learning for Materials. 
See how Citrine has overcome these challenges and why off-the-shelf open-source AI will require a lot of tailoring to make it work in this space.}, + langid = {american}, + keywords = {/unread,AML,Citrine Informatics,compositional descriptors,materials,ML,sequential learning,small data,surrogate model,transfer learning,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Informatics_2021_Challenges in Machine Learning for Materials - AI White Paper.pdf;/Users/wasmer/Zotero/storage/M5PWI97V/white-paper-challenges-in-machine-learning-for-materials.html} +} + +@article{inizanScalableHybridDeep2023, + title = {Scalable Hybrid Deep Neural Networks/Polarizable Potentials Biomolecular Simulations Including Long-Range Effects}, + author = {Inizan, Théo Jaffrelot and Plé, Thomas and Adjoua, Olivier and Ren, Pengyu and Gökcan, Hatice and Isayev, Olexandr and Lagardère, Louis and Piquemal, Jean-Philip}, + date = {2023-05-24}, + journaltitle = {Chemical Science}, + shortjournal = {Chem. Sci.}, + volume = {14}, + number = {20}, + pages = {5438--5452}, + publisher = {{The Royal Society of Chemistry}}, + issn = {2041-6539}, + doi = {10.1039/D2SC04815A}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/sc/d2sc04815a}, + urldate = {2023-10-05}, + abstract = {Deep-HP is a scalable extension of the Tinker-HP multi-GPU molecular dynamics (MD) package enabling the use of Pytorch/TensorFlow Deep Neural Network (DNN) models. Deep-HP increases DNNs' MD capabilities by orders of magnitude offering access to ns simulations for 100k-atom biosystems while offering the possibility of coupling DNNs to any classical (FFs) and many-body polarizable (PFFs) force fields. It allows therefore the introduction of the ANI-2X/AMOEBA hybrid polarizable potential designed for ligand binding studies where solvent–solvent and solvent–solute interactions are computed with the AMOEBA PFF while solute–solute ones are computed by the ANI-2X DNN. 
ANI-2X/AMOEBA explicitly includes AMOEBA's physical long-range interactions via an efficient Particle Mesh Ewald implementation while preserving ANI-2X's solute short-range quantum mechanical accuracy. The DNN/PFF partition can be user-defined allowing for hybrid simulations to include key ingredients of biosimulation such as polarizable solvents, polarizable counter ions, etc.… ANI-2X/AMOEBA is accelerated using a multiple-timestep strategy focusing on the model's contributions to low-frequency modes of nuclear forces. It primarily evaluates AMOEBA forces while including ANI-2X ones only via correction-steps resulting in an order of magnitude acceleration over standard Velocity Verlet integration. Simulating more than 10 μs, we compute charged/uncharged ligand solvation free energies in 4 solvents, and absolute binding free energies of host–guest complexes from SAMPL challenges. ANI-2X/AMOEBA average errors are discussed in terms of statistical uncertainty and appear in the range of chemical accuracy compared to experiment. The availability of the Deep-HP computational platform opens the path towards large-scale hybrid DNN simulations, at force-field cost, in biophysics and drug discovery.}, + langid = {english}, + keywords = {/unread,AML,ANI1-x,biomolecules,chemistry,GPU,HPC,library,ML,MLP,parallelization,software,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Inizan et al_2023_Scalable hybrid deep neural networks-polarizable potentials biomolecular.pdf;/Users/wasmer/Zotero/storage/ZQFN36VU/Inizan et al. - 2023 - Scalable hybrid deep neural networkspolarizable p.pdf} +} + +@book{inuiGroupTheoryIts1990, + title = {Group {{Theory}} and {{Its Applications}} in {{Physics}}}, + author = {Inui, Teturo and Tanabe, Yukito and Onodera, Yositaka}, + editorb = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Lotsch, Helmut K. 
V.}, + editorbtype = {redactor}, + date = {1990}, + series = {Springer {{Series}} in {{Solid-State Sciences}}}, + volume = {78}, + publisher = {{Springer}}, + location = {{Berlin, Heidelberg}}, + doi = {10.1007/978-3-642-80021-4}, + url = {http://link.springer.com/10.1007/978-3-642-80021-4}, + urldate = {2023-09-20}, + isbn = {978-3-540-60445-7 978-3-642-80021-4}, + langid = {english}, + keywords = {group theory,irreps,physics,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Inui et al_1990_Group Theory and Its Applications in Physics.pdf} +} + @article{ismail-beigiNewAlgebraicFormulation2000, title = {New {{Algebraic Formulation}} of {{Density Functional Calculation}}}, author = {Ismail-Beigi, Sohrab and Arias, T. A.}, @@ -5492,6 +7109,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Jain et al_2023_GFlowNets for AI-Driven Scientific Discovery.pdf;/Users/wasmer/Zotero/storage/298HNEQA/2302.html} } +@online{janakarajanLanguageModelsMolecular2023, + title = {Language Models in Molecular Discovery}, + author = {Janakarajan, Nikita and Erdmann, Tim and Swaminathan, Sarath and Laino, Teodoro and Born, Jannis}, + date = {2023-09-28}, + eprint = {2309.16235}, + eprinttype = {arxiv}, + eprintclass = {physics, q-bio}, + doi = {10.48550/arXiv.2309.16235}, + url = {http://arxiv.org/abs/2309.16235}, + urldate = {2023-10-05}, + abstract = {The success of language models, especially transformer-based architectures, has trickled into other domains giving rise to "scientific language models" that operate on small molecules, proteins or polymers. In chemistry, language models contribute to accelerating the molecule discovery cycle as evidenced by promising recent findings in early-stage drug discovery. Here, we review the role of language models in molecular discovery, underlining their strength in de novo drug design, property prediction and reaction chemistry. 
We highlight valuable open-source software assets thus lowering the entry barrier to the field of scientific language modeling. Last, we sketch a vision for future molecular design that combines a chatbot interface with access to computational chemistry tools. Our contribution serves as a valuable resource for researchers, chemists, and AI enthusiasts interested in understanding how language models can and will be used to accelerate chemical discovery.}, + pubstate = {preprint}, + keywords = {AML,biomolecules,chemical synthesis,chemistry,drug discovery,foundation models,IBM,LLM,ML,nlp,property prediction,review}, + file = {/Users/wasmer/Nextcloud/Zotero/Janakarajan et al_2023_Language models in molecular discovery.pdf;/Users/wasmer/Zotero/storage/PR7IJC84/2309.html} +} + @online{janssenAutomatedOptimizationConvergence2021, title = {Automated Optimization of Convergence Parameters in Plane Wave Density Functional Theory Calculations via a Tensor Decomposition-Based Uncertainty Quantification}, author = {Janssen, Jan and Makarov, Edgar and Hickel, Tilmann and Shapeev, Alexander V. and Neugebauer, Jörg}, @@ -5536,10 +7169,20 @@ doi = {10.1109/SC41405.2020.00009}, abstract = {For 35 years, ab initio molecular dynamics (AIMD) has been the method of choice for modeling complex atomistic phenomena from first principles. However, most AIMD applications are limited by computational cost to systems with thousands of atoms at most. We report that a machine learning based simulation protocol (Deep Potential Molecular Dynamics), while retaining ab initio accuracy, can simulate more than 1 nanosecond-long trajectory of over 100 million atoms per day, using a highly optimized code (GPU DeePMD-kit) on the Summit supercomputer. Our code can efficiently scale up to the entire Summit supercomputer, attaining 91 PFLOPS in double precision (45.5\% of the peak) and 162/275 PFLOPS in mixed-single/half precision. 
The great accomplishment of this work is that it opens the door to simulating unprecedented size and time scales with ab initio accuracy. It also poses new challenges to the next-generation supercomputer for a better integration of machine learning and physical modeling.}, eventtitle = {{{SC20}}: {{International Conference}} for {{High Performance Computing}}, {{Networking}}, {{Storage}} and {{Analysis}}}, - keywords = {100 million atoms,DeePMD-kit,MD,ML,MLP,record,Supercomputer}, + keywords = {AML,biomolecules,DeePMD-kit,Gordon Bell Prize,large-scale simulation,MD,ML,MLP,record,scaling,Supercomputer}, file = {/Users/wasmer/Nextcloud/Zotero/Jia et al_2020_Pushing the Limit of Molecular Dynamics with Ab Initio Accuracy to 100 Million.pdf;/Users/wasmer/Zotero/storage/UML425XW/9355242.html} } +@unpublished{jiMathematicalViewAttention2020, + title = {A {{Mathematical View}} of {{Attention Models}} in {{Deep Learning}}}, + author = {Ji, Shuiwang and Xie, Yaochen and Gao, Hongyang}, + date = {2020}, + abstract = {This introduction of attention models aims at providing a complete, self-contained, and easy-to- understand introduction of this important class of deep modules. This document is based on lecture notes by Shuiwang Ji at Texas A\&M University and can be used for undergraduate and graduate level classes}, + langid = {english}, + keywords = {AML,attention,educational,equivariant,General ML,geometric deep learning,introduction,invariance,ML,ML theory,multi-head attention,self-attention,symmetry}, + file = {/Users/wasmer/Zotero/storage/YALP3X85/Ji et al. 
- A Mathematical View of Attention Models in Deep Le.pdf} +} + @article{jinHopesRaisedRoomtemperature2023, title = {Hopes Raised for Room-Temperature Superconductivity, but Doubts Remain}, author = {Jin, ChangQing and Ceperley, David}, @@ -5560,6 +7203,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Jin_Ceperley_2023_Hopes raised for room-temperature superconductivity, but doubts remain.pdf} } +@article{jinnouchiPhaseTransitionsHybrid2019, + title = {Phase {{Transitions}} of {{Hybrid Perovskites Simulated}} by {{Machine-Learning Force Fields Trained}} on the {{Fly}} with {{Bayesian Inference}}}, + author = {Jinnouchi, Ryosuke and Lahnsteiner, Jonathan and Karsai, Ferenc and Kresse, Georg and Bokdam, Menno}, + date = {2019-06-07}, + journaltitle = {Physical Review Letters}, + shortjournal = {Phys. Rev. Lett.}, + volume = {122}, + number = {22}, + pages = {225701}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevLett.122.225701}, + url = {https://link.aps.org/doi/10.1103/PhysRevLett.122.225701}, + urldate = {2023-09-05}, + abstract = {Realistic finite temperature simulations of matter are a formidable challenge for first principles methods. Long simulation times and large length scales are required, demanding years of computing time. Here we present an on-the-fly machine learning scheme that generates force fields automatically during molecular dynamics simulations. This opens up the required time and length scales, while retaining the distinctive chemical precision of first principles methods and minimizing the need for human intervention. The method is widely applicable to multielement complex systems. We demonstrate its predictive power on the entropy driven phase transitions of hybrid perovskites, which have never been accurately described in simulations. Using machine learned potentials, isothermal-isobaric simulations give direct insight into the underlying microscopic mechanisms. 
Finally, we relate the phase transition temperatures of different perovskites to the radii of the involved species, and we determine the order of the transitions in Landau theory.}, + keywords = {/unread,active learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Jinnouchi et al_2019_Phase Transitions of Hybrid Perovskites Simulated by Machine-Learning Force.pdf;/Users/wasmer/Zotero/storage/N4YE6INM/PhysRevLett.122.html} +} + @book{johanssonNumericalPythonScientific2019, title = {Numerical {{Python}}: {{Scientific Computing}} and {{Data Science Applications}} with {{Numpy}}, {{SciPy}} and {{Matplotlib}}}, shorttitle = {Numerical {{Python}}}, @@ -5666,6 +7327,36 @@ file = {/Users/wasmer/Nextcloud/Zotero/Jørgensen_Bhowmik_2021_Graph neural networks for fast electron density estimation of molecules,.pdf;/Users/wasmer/Zotero/storage/MBXG22TT/2112.html} } +@online{joshiExpressivePowerGeometric2023, + title = {On the {{Expressive Power}} of {{Geometric Graph Neural Networks}}}, + author = {Joshi, Chaitanya K. and Bodnar, Cristian and Mathis, Simon V. and Cohen, Taco and Liò, Pietro}, + date = {2023-06-03}, + eprint = {2301.09308}, + eprinttype = {arxiv}, + eprintclass = {cs, math, stat}, + doi = {10.48550/arXiv.2301.09308}, + url = {http://arxiv.org/abs/2301.09308}, + urldate = {2023-10-07}, + abstract = {The expressive power of Graph Neural Networks (GNNs) has been studied extensively through the Weisfeiler-Leman (WL) graph isomorphism test. However, standard GNNs and the WL framework are inapplicable for geometric graphs embedded in Euclidean space, such as biomolecules, materials, and other physical systems. In this work, we propose a geometric version of the WL test (GWL) for discriminating geometric graphs while respecting the underlying physical symmetries: permutations, rotation, reflection, and translation. 
We use GWL to characterise the expressive power of geometric GNNs that are invariant or equivariant to physical symmetries in terms of distinguishing geometric graphs. GWL unpacks how key design choices influence geometric GNN expressivity: (1) Invariant layers have limited expressivity as they cannot distinguish one-hop identical geometric graphs; (2) Equivariant layers distinguish a larger class of graphs by propagating geometric information beyond local neighbourhoods; (3) Higher order tensors and scalarisation enable maximally powerful geometric GNNs; and (4) GWL's discrimination-based perspective is equivalent to universal approximation. Synthetic experiments supplementing our results are available at \textbackslash url\{https://github.com/chaitjo/geometric-gnn-dojo\}}, + pubstate = {preprint}, + keywords = {AML,benchmarking,DimeNet,educational,equivariant,General ML,geometric deep learning,GNN,invariance,library,MACE,ML,MPNN,review,review-of-GNN,SchNet,SE(3),transformer,with-code,WL test}, + file = {/Users/wasmer/Nextcloud/Zotero/Joshi et al_2023_On the Expressive Power of Geometric Graph Neural Networks.pdf;/Users/wasmer/Zotero/storage/4HKE6SLD/2301.html} +} + +@inproceedings{joubertLearningScaleSummit2022, + title = {Learning to {{Scale}} the {{Summit}}: {{AI}} for {{Science}} on a {{Leadership Supercomputer}}}, + shorttitle = {Learning to {{Scale}} the {{Summit}}}, + booktitle = {2022 {{IEEE International Parallel}} and {{Distributed Processing Symposium Workshops}} ({{IPDPSW}})}, + author = {Joubert, Wayne and Messer, Bronson and Roth, Philip C. 
and Georgiadou, Antigoni and Lietz, Justin and Eisenbach, Markus and Yin, Junqi},
+  date = {2022-05},
+  pages = {1246--1255},
+  doi = {10.1109/IPDPSW55747.2022.00221},
+  abstract = {The Summit system at Oak Ridge National Laboratory (ORNL) has been the world's top AI for science supercomputer for several years, ranked world's fastest computer at its 2018 launch and currently top system in the US and \#2 on the TOP500 list. Summit's purposeful design to handle both conventional modeling and simulation science and emerging AI workloads has made it a leading destination for AI-powered computational science. We report here on AI for science usage on Summit near the midpoint of its lifespan. We review AI usage across the many science projects that have used Summit. We then examine in detail a set of applications scaling AI to full system as well as projects implementing AI-coordinated science discovery workflows on Summit. Finally, we offer some observations regarding the future of advancing scientific knowledge and understanding via AI, especially in the context of leadership-class scientific computing.},
+  eventtitle = {2022 {{IEEE International Parallel}} and {{Distributed Processing Symposium Workshops}} ({{IPDPSW}})},
+  keywords = {/unread,AI,for introductions,General ML,HPC,HPC statistics,HPC user statistics,ML,Supercomputer,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Joubert et al_2022_Learning to Scale the Summit.pdf}
+}
+
 @online{julichJulichKKRCodes,
   title = {The {{Jülich KKR Codes}}},
   shorttitle = {Jülich {{KKR}} Codes},
@@ -5874,6 +7565,36 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Kaundinya et al_2022_Prediction of the Electron Density of States for Crystalline Compounds with.pdf}
 }
 
+@article{kavalskyHowMuchCan2023,
+  title = {By how much can closed-loop frameworks accelerate computational materials discovery?},
+  author = {Kavalsky, Lance and Hegde, Vinay I. and Muckley, Eric and Johnson, Matthew S. and Meredig, Bryce and Viswanathan, 
Venkatasubramanian}, + date = {2023}, + journaltitle = {Digital Discovery}, + volume = {2}, + number = {4}, + pages = {1112--1125}, + publisher = {{Royal Society of Chemistry}}, + doi = {10.1039/D2DD00133K}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/dd/d2dd00133k}, + urldate = {2023-08-19}, + langid = {english}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Kavalsky et al_2023_By how much can closed-loop frameworks accelerate computational materials.pdf} +} + +@inproceedings{kavanaghAPSAPSMarch2023, + title = {{{APS}} -{{APS March Meeting}} 2023 - {{Event}} - {{Symmetry-breaking}} and Reconstruction at Point Defects in Solids}, + booktitle = {Bulletin of the {{American Physical Society}}}, + author = {Kavanagh, Séan R}, + date = {2023-03}, + publisher = {{American Physical Society}}, + url = {https://meetings.aps.org/Meeting/MAR23/Session/D41.2}, + urldate = {2023-08-25}, + eventtitle = {{{APS March Meeting}} 2023}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/R72ILT3Q/D41.html} +} + @article{kawamuraLaughlinChargePumping2023, title = {Laughlin Charge Pumping in a Quantum Anomalous {{Hall}} Insulator}, author = {Kawamura, Minoru and Mogi, Masataka and Yoshimi, Ryutaro and Morimoto, Takahiro and Takahashi, Kei S. and Tsukazaki, Atsushi and Nagaosa, Naoto and Kawasaki, Masashi and Tokura, Yoshinori}, @@ -5912,10 +7633,30 @@ abstract = {Two-dimensional materials offer a promising platform for the next generation of (opto-) electronic devices and other high technology applications. One of the most exciting characteristics of 2D crystals is the ability to tune their properties via controllable introduction of defects. However, the search space for such structures is enormous, and ab-initio computations prohibitively expensive. We propose a machine learning approach for rapid estimation of the properties of 2D material given the lattice structure and defect configuration. 
The method suggests a way to represent configuration of 2D materials with defects that allows a neural network to train quickly and accurately. We compare our methodology with the state-of-the-art approaches and demonstrate at least 3.7 times energy prediction error drop. Also, our approach is an order of magnitude more resource-efficient than its contenders both for the training and inference part.},
   issue = {1},
   langid = {english},
-  keywords = {2D material,AML,defects,disordered,exchange interaction,GNN,library,ML,point defects,prediction from defect structure,prediction of formation energy,prediction of HOMO/LUMO,with-code,with-data,with-demo},
+  keywords = {\_tablet,2D material,AML,defects,disordered,exchange interaction,GNN,library,ML,multi-defect,point defects,prediction from defect structure,prediction of formation energy,prediction of HOMO/LUMO,representation of defects,sparsification,with-code,with-data,with-demo},
   file = {/Users/wasmer/Nextcloud/Zotero/Kazeev et al_2023_Sparse representation for machine learning the properties of defects in 2D.pdf}
 }
 
+@article{kearnesMolecularGraphConvolutions2016,
+  title = {Molecular graph convolutions: moving beyond fingerprints},
+  shorttitle = {Molecular graph convolutions},
+  author = {Kearnes, Steven and McCloskey, Kevin and Berndl, Marc and Pande, Vijay and Riley, Patrick},
+  date = {2016-08-01},
+  journaltitle = {Journal of Computer-Aided Molecular Design},
+  shortjournal = {J Comput Aided Mol Des},
+  volume = {30},
+  number = {8},
+  pages = {595--608},
+  issn = {1573-4951},
+  doi = {10.1007/s10822-016-9938-8},
+  url = {https://doi.org/10.1007/s10822-016-9938-8},
+  urldate = {2023-09-25},
+  abstract = {Molecular “fingerprints” encoding structural information are the workhorse of cheminformatics and machine learning in drug discovery applications. 
However, fingerprint representations necessarily emphasize particular aspects of the molecular structure while ignoring others, rather than allowing the model to make data-driven decisions. We describe molecular graph convolutions, a machine learning architecture for learning from undirected graphs, specifically small molecules. Graph convolutions use a simple encoding of the molecular graph—atoms, bonds, distances, etc.—which allows the model to take greater advantage of information in the graph structure. Although graph convolutions do not outperform all fingerprint-based methods, they (along with other graph-based methods) represent a new paradigm in ligand-based virtual screening with exciting opportunities for future improvement.}, + langid = {english}, + keywords = {/unread,AML,cheminformatics,convolution,GNN,graph convolution,graph ML,ML,molecules,representation learning}, + file = {/Users/wasmer/Zotero/storage/VY3RDG2K/Kearnes et al. - 2016 - Molecular graph convolutions moving beyond finger.pdf} +} + @article{keimerPhysicsQuantumMaterials2017, title = {The Physics of Quantum Materials}, author = {Keimer, B. and Moore, J. E.}, @@ -5952,7 +7693,7 @@ url = {https://doi.org/10.1021/acs.chemrev.1c00107}, urldate = {2023-03-20}, abstract = {Machine learning models are poised to make a transformative impact on chemical sciences by dramatically accelerating computational algorithms and amplifying insights available from computational chemistry methods. However, achieving this requires a confluence and coaction of expertise in computer science and physical sciences. This Review is written for new and experienced researchers working at the intersection of both fields. We first provide concise tutorials of computational chemistry and machine learning methods, showing how insights involving both can be achieved. 
We follow with a critical review of noteworthy applications that demonstrate how computational chemistry and machine learning can be used together to provide insightful (and useful) predictions in molecular and materials modeling, retrosyntheses, catalysis, and drug design.}, - keywords = {/unread,AML,benchmarking,chemistry,HFT,MD,ML,ML-DFT,ML-ESM,MLP,multiscale,review,review-of-AML,WFT}, + keywords = {AML,benchmarking,chemistry,HFT,MD,ML,ML-DFT,ML-ESM,MLP,multiscale,review,review-of-AML,WFT}, file = {/Users/wasmer/Zotero/storage/BF3L8VHS/Keith et al. - 2021 - Combining Machine Learning and Computational Chemi.pdf;/Users/wasmer/Zotero/storage/NNJM7AKG/acs.chemrev.html} } @@ -5991,6 +7732,44 @@ file = {/Users/wasmer/Nextcloud/Zotero/Kidger_2022_On Neural Differential Equations.pdf;/Users/wasmer/Zotero/storage/EHARV7VZ/2202.html} } +@article{kimMachinelearnedMetricsPredicting2020, + title = {Machine-Learned Metrics for Predicting the Likelihood of Success in Materials Discovery}, + author = {Kim, Yoolhee and Kim, Edward and Antono, Erin and Meredig, Bryce and Ling, Julia}, + date = {2020-08-26}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {6}, + number = {1}, + pages = {1--9}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-020-00401-8}, + url = {https://www.nature.com/articles/s41524-020-00401-8}, + urldate = {2023-08-19}, + abstract = {Materials discovery is often compared to the challenge of finding a needle in a haystack. While much work has focused on accurately predicting the properties of candidate materials with machine learning (ML), which amounts to evaluating whether a given candidate is a piece of straw or a needle, less attention has been paid to a critical question: are we searching in the right haystack? 
We refer to the haystack as the design space for a particular materials discovery problem (i.e., the set of possible candidate materials to synthesize), and thus frame this question as one of design space selection. In this paper, we introduce two metrics, the predicted fraction of improved candidates (PFIC), and the cumulative maximum likelihood of improvement (CMLI), which we demonstrate can identify discovery-rich and discovery-poor design spaces, respectively. A combined classification system, composed of the CMLI and PFIC metrics, is then used to identify optimal design spaces with high precision, and thus show the potential to significantly accelerate ML-driven materials discovery.}, + issue = {1}, + langid = {english}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Kim et al_2020_Machine-learned metrics for predicting the likelihood of success in materials.pdf} +} + +@online{kingmaAdamMethodStochastic2017, + title = {Adam: {{A Method}} for {{Stochastic Optimization}}}, + shorttitle = {Adam}, + author = {Kingma, Diederik P. and Ba, Jimmy}, + date = {2017-01-29}, + eprint = {1412.6980}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.1412.6980}, + url = {http://arxiv.org/abs/1412.6980}, + urldate = {2023-07-21}, + abstract = {We introduce Adam, an algorithm for first-order gradient-based optimization of stochastic objective functions, based on adaptive estimates of lower-order moments. The method is straightforward to implement, is computationally efficient, has little memory requirements, is invariant to diagonal rescaling of the gradients, and is well suited for problems that are large in terms of data and/or parameters. The method is also appropriate for non-stationary objectives and problems with very noisy and/or sparse gradients. The hyper-parameters have intuitive interpretations and typically require little tuning. 
Some connections to related algorithms, on which Adam was inspired, are discussed. We also analyze the theoretical convergence properties of the algorithm and provide a regret bound on the convergence rate that is comparable to the best known results under the online convex optimization framework. Empirical results demonstrate that Adam works well in practice and compares favorably to other stochastic optimization methods. Finally, we discuss AdaMax, a variant of Adam based on the infinity norm.},
+  pubstate = {preprint},
+  keywords = {Adam,Deep learning,General ML,ML,NN,optimization,optimizer,original publication},
+  file = {/Users/wasmer/Nextcloud/Zotero/Kingma_Ba_2017_Adam.pdf;/Users/wasmer/Zotero/storage/FDMFEJS4/1412.html}
+}
+
 @article{kippChiralHallEffect2021,
   title = {The Chiral {{Hall}} Effect in Canted Ferromagnets and Antiferromagnets},
   author = {Kipp, Jonathan and Samanta, Kartik and Lux, Fabian R. and Merte, Maximilian and Go, Dongwook and Hanke, Jan-Philipp and Redies, Matthias and Freimuth, Frank and Blügel, Stefan and Ležaić, Marjana and Mokrousov, Yuriy},
@@ -6243,6 +8022,67 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Kohn_1965_Self-Consistent Equations Including Exchange and Correlation Effects.pdf;/Users/wasmer/Zotero/storage/4CF9DCKS/PhysRev.140.html}
 }
 
+@article{kohnSolutionSchrOdinger1954,
+  title = {Solution of the {{Schrödinger Equation}} in {{Periodic Lattices}} with an {{Application}} to {{Metallic Lithium}}},
+  author = {Kohn, W. and Rostoker, N.},
+  date = {1954-06-01},
+  journaltitle = {Physical Review},
+  shortjournal = {Phys. Rev.},
+  volume = {94},
+  number = {5},
+  pages = {1111--1120},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRev.94.1111},
+  url = {https://link.aps.org/doi/10.1103/PhysRev.94.1111},
+  urldate = {2023-09-19},
+  abstract = {The problem of solving the Schrödinger equation in a periodic lattice is studied from the point of view of the variation-iteration method. 
This approach leads to a very compact scheme if the potential V(r) is spherically symmetrical within the inscribed spheres of the atomic polyhedra and constant in the space between them. The band structure of the lattice is then determined by (1) geometrical structure constants, characteristic of the type of lattice and (2) the logarithmic derivatives, at the surface of the inscribed sphere, of the s, p, d, … functions corresponding to V(r). By far the greater part of the labor is involved in the calculation of (1), which needs to be done only once for each type of lattice; (2) can be obtained by numerical integration or directly from the atomic spectra. Although derived from a different point of view, this scheme turns out to be essentially equivalent to one proposed by Korringa on the basis of the theory of lattice interferences. The present paper also contains an application to the conduction band of metallic lithium.}, + keywords = {KKR,KKR foundations,original publication}, + file = {/Users/wasmer/Nextcloud/Zotero/Kohn_Rostoker_1954_Solution of the Schr-odinger Equation in Periodic Lattices with an Application.pdf;/Users/wasmer/Zotero/storage/RDPKHK8A/PhysRev.94.html} +} + +@article{kolbDiscoveringChargeDensity2017, + title = {Discovering Charge Density Functionals and Structure-Property Relationships with {{PROPhet}}: {{A}} General Framework for Coupling Machine Learning and First-Principles Methods}, + shorttitle = {Discovering Charge Density Functionals and Structure-Property Relationships with {{PROPhet}}}, + author = {Kolb, Brian and Lentz, Levi C. 
and Kolpak, Alexie M.}, + date = {2017-04-26}, + journaltitle = {Scientific Reports}, + shortjournal = {Sci Rep}, + volume = {7}, + number = {1}, + pages = {1192}, + publisher = {{Nature Publishing Group}}, + issn = {2045-2322}, + doi = {10.1038/s41598-017-01251-z}, + url = {https://www.nature.com/articles/s41598-017-01251-z}, + urldate = {2023-08-24}, + abstract = {Modern ab initio methods have rapidly increased our understanding of solid state materials properties, chemical reactions, and the quantum interactions between atoms. However, poor scaling often renders direct ab initio calculations intractable for large or complex systems. There are two obvious avenues through which to remedy this problem: (i) develop new, less expensive methods to calculate system properties, or (ii) make existing methods faster. This paper describes an open source framework designed to pursue both of these avenues. PROPhet (short for PROPerty Prophet) utilizes machine learning techniques to find complex, non-linear mappings between sets of material or system properties. The result is a single code capable of learning analytical potentials, non-linear density functionals, and other structure-property or property-property relationships. These capabilities enable highly accurate mesoscopic simulations, facilitate computation of expensive properties, and enable the development of predictive models for systematic materials design and optimization. 
This work explores the coupling of machine learning to ab initio methods through means both familiar (e.g., the creation of various potentials and energy functionals) and less familiar (e.g., the creation of density functionals for arbitrary properties), serving both to demonstrate PROPhet’s ability to create exciting post-processing analysis tools and to open the door to improving ab initio methods themselves with these powerful machine learning techniques.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,Computational chemistry,Density functional theory,DFT,DFT code integration,Electronic structure,FHI-aims,grid-based descriptors,LAMMPS,library,MD,ML,ML-DFA,ML-DFT,ML-ESM,MLP,prediction from density,prediction of energy,prediction of Exc,Quantum ESPRESSO,Software,todo-tagging,VASP}, + file = {/Users/wasmer/Nextcloud/Zotero/Kolb et al_2017_Discovering charge density functionals and structure-property relationships.pdf} +} + +@article{kongDensityStatesPrediction2022, + title = {Density of States Prediction for Materials Discovery via Contrastive Learning from Probabilistic Embeddings}, + author = {Kong, Shufeng and Ricci, Francesco and Guevarra, Dan and Neaton, Jeffrey B. and Gomes, Carla P. and Gregoire, John M.}, + date = {2022-02-17}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {13}, + number = {1}, + pages = {949}, + publisher = {{Nature Publishing Group}}, + issn = {2041-1723}, + doi = {10.1038/s41467-022-28543-x}, + url = {https://www.nature.com/articles/s41467-022-28543-x}, + urldate = {2023-09-23}, + abstract = {Machine learning for materials discovery has largely focused on predicting an individual scalar rather than multiple related properties, where spectral properties are an important example. 
Fundamental spectral properties include the phonon density of states (phDOS) and the electronic density of states (eDOS), which individually or collectively are the origins of a breadth of materials observables and functions. Building upon the success of graph attention networks for encoding crystalline materials, we introduce a probabilistic embedding generator specifically tailored to the prediction of spectral properties. Coupled with supervised contrastive learning, our materials-to-spectrum (Mat2Spec) model outperforms state-of-the-art methods for predicting ab initio phDOS and eDOS for crystalline materials. We demonstrate Mat2Spec’s ability to identify eDOS gaps below the Fermi energy, validating predictions with ab initio calculations and thereby discovering candidate thermoelectrics and transparent conductors. Mat2Spec is an exemplar framework for predicting spectral properties of materials via strategically incorporated machine learning techniques.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,CGCNN,contrastive learning,e3nn,GATGNN,Gaussian mixture,GNN,graph attention,library,Mat2Spec,materials,ML,ML-DFT,ML-ESM,multi-task learning,prediction of DOS,prediction of eDOS,prediction of phDOS,spectroscopy,with-code,with-data}, + file = {/Users/wasmer/Zotero/storage/6NBDKQII/Kong et al. 
- 2022 - Density of states prediction for materials discove.pdf} +} + @article{kongOpportunitiesChemistryMaterials2011, title = {Opportunities in Chemistry and Materials Science for Topological Insulators and Their Nanostructures}, author = {Kong, Desheng and Cui, Yi}, @@ -6263,6 +8103,24 @@ keywords = {/unread} } +@article{korringaCalculationEnergyBloch1947, + title = {On the Calculation of the Energy of a {{Bloch}} Wave in a Metal}, + author = {Korringa, J}, + date = {1947-08-01}, + journaltitle = {Physica}, + shortjournal = {Physica}, + volume = {13}, + number = {6}, + pages = {392--400}, + issn = {0031-8914}, + doi = {10.1016/0031-8914(47)90013-X}, + url = {https://www.sciencedirect.com/science/article/pii/003189144790013X}, + urldate = {2023-09-19}, + abstract = {General formulae for the calculation of the energy E(k) of a Bloch wave with reduced wave vector k are obtained by the application of the dynamical theory of lattice interferences to electron waves. This method requires the computation of the zeros of an infinite determinant which in fair approximation might be replaced by a determinant with a few rows and columns only. Its elements are the sum of two functions, a function of E and k, resulting from interference of multipole waves of free electrons (and therefore independent of the lattice potential), and a function of E only, resulting from scattering of free electron waves by the field around one atom. 
Of the latter terms only those in the principal diagonal are of importance.}, + keywords = {KKR,KKR foundations,original publication}, + file = {/Users/wasmer/Nextcloud/Zotero/Korringa_1947_On the calculation of the energy of a Bloch wave in a metal.pdf;/Users/wasmer/Zotero/storage/CCAE8PZY/003189144790013X.html} +} + @article{korshunovaOpenChemDeepLearning2021, title = {{{OpenChem}}: {{A Deep Learning Toolkit}} for {{Computational Chemistry}} and {{Drug Design}}}, shorttitle = {{{OpenChem}}}, @@ -6367,6 +8225,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Krenn et al_2022_Predicting the Future of AI with AI.pdf;/Users/wasmer/Zotero/storage/MZBX2N4K/2210.html} } +@article{krennScientificUnderstandingArtificial2022, + title = {On Scientific Understanding with Artificial Intelligence}, + author = {Krenn, Mario and Pollice, Robert and Guo, Si Yue and Aldeghi, Matteo and Cervera-Lierta, Alba and Friederich, Pascal and family=Passos~Gomes, given=Gabriel, prefix=dos, useprefix=true and Häse, Florian and Jinich, Adrian and Nigam, AkshatKumar and Yao, Zhenpeng and Aspuru-Guzik, Alán}, + date = {2022-12}, + journaltitle = {Nature Reviews Physics}, + shortjournal = {Nat Rev Phys}, + volume = {4}, + number = {12}, + pages = {761--769}, + publisher = {{Nature Publishing Group}}, + issn = {2522-5820}, + doi = {10.1038/s42254-022-00518-3}, + url = {https://www.nature.com/articles/s42254-022-00518-3}, + urldate = {2023-08-22}, + abstract = {An oracle that correctly predicts the outcome of every particle physics experiment, the products of every possible chemical reaction or the function of every protein would revolutionize science and technology. However, scientists would not be entirely satisfied because they would want to comprehend how the oracle made these predictions. This is scientific understanding, one of the main aims of science. 
With the increase in the available computational power and advances in artificial intelligence, a natural question arises: how can advanced computational systems, and specifically artificial intelligence, contribute to new scientific understanding or gain it autonomously? Trying to answer this question, we adopted a definition of ‘scientific understanding’ from the philosophy of science that enabled us to overview the scattered literature on the topic and, combined with dozens of anecdotes from scientists, map out three dimensions of computer-assisted scientific understanding. For each dimension, we review the existing state of the art and discuss future developments. We hope that this Perspective will inspire and focus research directions in this multidisciplinary emerging field.}, + issue = {12}, + langid = {english}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Zotero/storage/9QXF3GUC/Krenn et al. - 2022 - On scientific understanding with artificial intell.pdf} +} + @article{krennSelfreferencingEmbeddedStrings2020, title = {Self-Referencing Embedded Strings ({{SELFIES}}): {{A}} 100\% Robust Molecular String Representation}, shorttitle = {Self-Referencing Embedded Strings ({{SELFIES}})}, @@ -6404,6 +8283,20 @@ file = {/Users/wasmer/Nextcloud/Zotero/Kreuzberger et al_2023_Machine Learning Operations (MLOps).pdf;/Users/wasmer/Zotero/storage/AGAJG2J6/10081336.html} } +@book{kronmullerHandbookMagnetismAdvanced2007, + title = {Handbook of {{Magnetism}} and {{Advanced Magnetic Materials}}}, + editor = {Kronmüller, Helmut and Parkin, Stuart}, + date = {2007-07-27}, + edition = {1}, + publisher = {{Wiley}}, + doi = {10.1002/9780470022184}, + url = {https://onlinelibrary.wiley.com/doi/book/10.1002/9780470022184}, + urldate = {2023-11-01}, + isbn = {978-0-470-02217-7 978-0-470-02218-4}, + langid = {english}, + keywords = {/unread,DFT,magnetism,physics,reference} +} + @article{kulikRoadmapMachineLearning2022, title = {Roadmap on {{Machine Learning}} in {{Electronic 
Structure}}}, author = {Kulik, Heather and Hammerschmidt, Thomas and Schmidt, Jonathan and Botti, Silvana and Marques, Miguel A. L. and Boley, Mario and Scheffler, Matthias and Todorović, Milica and Rinke, Patrick and Oses, Corey and Smolyanyuk, Andriy and Curtarolo, Stefano and Tkatchenko, Alexandre and Bartok, Albert and Manzhos, Sergei and Ihara, Manabu and Carrington, Tucker and Behler, Jörg and Isayev, Olexandr and Veit, Max and Grisafi, Andrea and Nigam, Jigyasa and Ceriotti, Michele and Schütt, Kristoff T and Westermayr, Julia and Gastegger, Michael and Maurer, Reinhard and Kalita, Bhupalee and Burke, Kieron and Nagai, Ryo and Akashi, Ryosuke and Sugino, Osamu and Hermann, Jan and Noé, Frank and Pilati, Sebastiano and Draxl, Claudia and Kuban, Martin and Rigamonti, Santiago and Scheidgen, Markus and Esters, Marco and Hicks, David and Toher, Cormac and Balachandran, Prasanna and Tamblyn, Isaac and Whitelam, Stephen and Bellinger, Colin and Ghiringhelli, Luca M.}, @@ -6439,6 +8332,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Kumar et al_2021_Topological Quantum Materials from the Viewpoint of Chemistry.pdf} } +@article{kurthRoleExchangeCorrelation2000, + title = {Role of the Exchange–Correlation Energy: {{Nature}}'s Glue}, + shorttitle = {Role of the Exchange–Correlation Energy}, + author = {Kurth, Stefan and Perdew, John P.}, + date = {2000}, + journaltitle = {International Journal of Quantum Chemistry}, + volume = {77}, + number = {5}, + pages = {814--818}, + issn = {1097-461X}, + doi = {10.1002/(SICI)1097-461X(2000)77:5<814::AID-QUA3>3.0.CO;2-F}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/%28SICI%291097-461X%282000%2977%3A5%3C814%3A%3AAID-QUA3%3E3.0.CO%3B2-F}, + urldate = {2023-10-05}, + abstract = {In the Kohn–Sham density functional theory of ground-state electronic structure, only the exchange–correlation energy Exc must be approximated. 
Although Exc is not typically a large component of the total energy, it is the principal ingredient of the glue that binds atoms together to form molecules and solids. To illustrate this fact, we present self-consistent results for atomization energies of molecules and for surface energies and work functions of jellium, calculated within the “Hartree” approximation, which neglects Exc. The Hartree world displays weak bonding between atoms, low or negative surface energies, and work functions that are close to zero. Other aspects of the Hartree world can be deduced from known size–effect relationships. The mechanism behind the glue role of exchange and correlation is the suppression of Hartree charge fluctuations. © 2000 John Wiley \& Sons, Inc. Int J Quant Chem 77: 814–818, 2000},
+  langid = {english},
+  keywords = {chemical bonding,DFT,DFT exchange,DFT theory,for introductions,xc functional},
+  file = {/Users/wasmer/Nextcloud/Zotero/Kurth_Perdew_2000_Role of the exchange–correlation energy.pdf;/Users/wasmer/Zotero/storage/EVE2LZLC/(SICI)1097-461X(2000)775814AID-QUA33.0.html}
+}
+
 @online{laaksoUpdatesDScribeLibrary2023,
   title = {Updates to the {{DScribe Library}}: {{New Descriptors}} and {{Derivatives}}},
   shorttitle = {Updates to the {{DScribe Library}}},
@@ -6558,6 +8470,61 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Larsen et al_2017_The atomic simulation environment—a Python library for working with atoms.pdf}
 }
 
+@online{laszloffyTopologicalSuperconductivityFirstprinciples2023,
+  title = {Topological Superconductivity from First-Principles {{II}}: {{Effects}} from Manipulation of Spin Spirals \$-\$ {{Topological}} Fragmentation, Braiding, and {{Quasi-Majorana Bound States}}},
+  shorttitle = {Topological Superconductivity from First-Principles {{II}}},
+  author = {Lászlóffy, András and Nyári, Bendegúz and Csire, Gábor and Szunyogh, László and Újfalussy, Balázs},
+  date = {2023-08-26},
+  eprint = {2308.13831},
+  eprinttype = {arxiv},
+  eprintclass = {cond-mat},
+  
doi = {10.48550/arXiv.2308.13831},
+  url = {http://arxiv.org/abs/2308.13831},
+  urldate = {2023-09-20},
+  abstract = {Recent advances in electron spin resonance techniques have allowed the manipulation of the spin of individual atoms, making magnetic atomic chains on superconducting hosts one of the most promising platform where topological superconductivity can be engineered. Motivated by this progress, we provide a detailed, quantitative description of the effects of manipulating spins in realistic nanowires by applying a first-principles-based computational approach to a recent experiment: an iron chain deposited on top of Au/Nb heterostructure. As a continuation of the first part of the paper experimentally relevant computational experiments are performed in spin spiral chains that shed light on several concerns about practical applications and add new aspects to the interpretation of recent experiments. We explore the stability of topological zero energy states, the formation and distinction of topologically trivial and non-trivial zero energy edge states, the effect of local changes in the exchange fields, the emergence of topological fragmentation, and the shift of Majorana Zero Modes along the superconducting nanowires opening avenues toward the implementation of a braiding operation.},
+  pubstate = {preprint},
+  keywords = {/unread,Budapest KKR group,GF2023 workshop,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Lászlóffy et al_2023_Topological superconductivity from first-principles II.pdf;/Users/wasmer/Zotero/storage/WPNB3PLJ/2308.html}
+}
+
+@online{lavinSimulationIntelligenceNew2022,
+  title = {Simulation {{Intelligence}}: {{Towards}} a {{New Generation}} of {{Scientific Methods}}},
+  shorttitle = {Simulation {{Intelligence}}},
+  author = {Lavin, Alexander and Krakauer, David and Zenil, Hector and Gottschlich, Justin and Mattson, Tim and Brehmer, Johann and Anandkumar, Anima and Choudry, Sanjay and Rocki, Kamil and Baydin, Atılım Güneş and 
Prunkl, Carina and Paige, Brooks and Isayev, Olexandr and Peterson, Erik and McMahon, Peter L. and Macke, Jakob and Cranmer, Kyle and Zhang, Jiaxin and Wainwright, Haruko and Hanuka, Adi and Veloso, Manuela and Assefa, Samuel and Zheng, Stephan and Pfeffer, Avi}, + date = {2022-11-27}, + eprint = {2112.03235}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2112.03235}, + url = {http://arxiv.org/abs/2112.03235}, + urldate = {2023-08-21}, + abstract = {The original "Seven Motifs" set forth a roadmap of essential methods for the field of scientific computing, where a motif is an algorithmic method that captures a pattern of computation and data movement. We present the "Nine Motifs of Simulation Intelligence", a roadmap for the development and integration of the essential algorithms necessary for a merger of scientific computing, scientific simulation, and artificial intelligence. We call this merger simulation intelligence (SI), for short. We argue the motifs of simulation intelligence are interconnected and interdependent, much like the components within the layers of an operating system. Using this metaphor, we explore the nature of each layer of the simulation intelligence operating system stack (SI-stack) and the motifs therein: (1) Multi-physics and multi-scale modeling; (2) Surrogate modeling and emulation; (3) Simulation-based inference; (4) Causal modeling and inference; (5) Agent-based modeling; (6) Probabilistic programming; (7) Differentiable programming; (8) Open-ended optimization; (9) Machine programming. We believe coordinated efforts between motifs offers immense opportunity to accelerate scientific discovery, from solving inverse problems in synthetic biology and climate science, to directing nuclear energy experiments and predicting emergent behavior in socioeconomic settings. 
We elaborate on each layer of the SI-stack, detailing the state-of-art methods, presenting examples to highlight challenges and opportunities, and advocating for specific ways to advance the motifs and the synergies from their combinations. Advancing and integrating these technologies can enable a robust and efficient hypothesis-simulation-analysis type of scientific method, which we introduce with several use-cases for human-machine teaming and automated science.},
+  pubstate = {preprint},
+  keywords = {Pasteur \& ISI,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Lavin et al_2022_Simulation Intelligence.pdf;/Users/wasmer/Zotero/storage/LYDGASRK/2112.html}
+}
+
+@article{lavinTechnologyReadinessLevels2022,
+  title = {Technology readiness levels for machine learning systems},
+  author = {Lavin, Alexander and Gilligan-Lee, Ciarán M. and Visnjic, Alessya and Ganju, Siddha and Newman, Dava and Ganguly, Sujoy and Lange, Danny and Baydin, Atılım Güneş and Sharma, Amit and Gibson, Adam and Zheng, Stephan and Xing, Eric P. and Mattmann, Chris and Parr, James and Gal, Yarin},
+  date = {2022-10-20},
+  journaltitle = {Nature Communications},
+  shortjournal = {Nat Commun},
+  volume = {13},
+  number = {1},
+  pages = {6039},
+  publisher = {{Nature Publishing Group}},
+  issn = {2041-1723},
+  doi = {10.1038/s41467-022-33128-9},
+  url = {https://www.nature.com/articles/s41467-022-33128-9},
+  urldate = {2023-08-21},
+  abstract = {The development and deployment of machine learning systems can be executed easily with modern tools, but the process is typically rushed and means-to-an-end. Lack of diligence can lead to technical debt, scope creep and misaligned objectives, model misuse and failures, and expensive consequences. Engineering systems, on the other hand, follow well-defined processes and testing standards to streamline development for high-quality, reliable results. 
The extreme is spacecraft systems, with mission critical measures and robustness throughout the process. Drawing on experience in both spacecraft engineering and machine learning (research through product across domain areas), we’ve developed a proven systems engineering approach for machine learning and artificial intelligence: the Machine Learning Technology Readiness Levels framework defines a principled process to ensure robust, reliable, and responsible systems while being streamlined for machine learning workflows, including key distinctions from traditional software engineering, and a lingua franca for people across teams and organizations to work collaboratively on machine learning and artificial intelligence technologies. Here we describe the framework and elucidate with use-cases from physics research to computer vision apps to medical diagnostics.}, + issue = {1}, + langid = {english}, + keywords = {Pasteur \& ISI,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lavin et al_2022_Technology readiness levels for machine learning systems.pdf} +} + @online{LearningLJPotential, title = {Learning a {{LJ}} Potential — {{PiNN}} Documentation}, url = {https://teoroo-pinn.readthedocs.io/en/latest/notebooks/Learn_LJ_potential.html}, @@ -6566,6 +8533,59 @@ file = {/Users/wasmer/Zotero/storage/VDHVAB3I/Learn_LJ_potential.html} } +@article{leeCorrectionTopologicalQuantum2019, + title = {Correction to {{Topological Quantum Materials}} for {{Realizing Majorana Quasiparticles}}}, + author = {Lee, Stephen R. and Sharma, Peter A. and Lima-Sharma, Ana L. and Pan, Wei and Nenoff, Tina M.}, + date = {2019-05-14}, + journaltitle = {Chemistry of Materials}, + shortjournal = {Chem. 
Mater.}, + volume = {31}, + number = {9}, + pages = {3591--3591}, + publisher = {{American Chemical Society}}, + issn = {0897-4756}, + doi = {10.1021/acs.chemmater.9b01629}, + url = {https://doi.org/10.1021/acs.chemmater.9b01629}, + urldate = {2023-08-24}, + keywords = {correction,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lee et al_2019_Correction to Topological Quantum Materials for Realizing Majorana.pdf;/Users/wasmer/Zotero/storage/ETIJTH8X/acs.chemmater.html} +} + +@online{leePredictingDensityStates2023, + title = {Predicting {{Density}} of {{States}} via {{Multi-modal Transformer}}}, + author = {Lee, Namkyeong and Noh, Heewoong and Kim, Sungwon and Hyun, Dongmin and Na, Gyoung S. and Park, Chanyoung}, + date = {2023-04-10}, + eprint = {2303.07000}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2303.07000}, + url = {http://arxiv.org/abs/2303.07000}, + urldate = {2023-09-23}, + abstract = {The density of states (DOS) is a spectral property of materials, which provides fundamental insights on various characteristics of materials. In this paper, we propose a model to predict the DOS by reflecting the nature of DOS: DOS determines the general distribution of states as a function of energy. Specifically, we integrate the heterogeneous information obtained from the crystal structure and the energies via multi-modal transformer, thereby modeling the complex relationships between the atoms in the crystal structure, and various energy levels. Extensive experiments on two types of DOS, i.e., Phonon DOS and Electron DOS, with various real-world scenarios demonstrate the superiority of DOSTransformer. The source code for DOSTransformer is available at https://github.com/HeewoongNoh/DOSTransformer.}, + pubstate = {preprint}, + keywords = {/unread,Computer Science - Machine Learning,Condensed Matter - Materials Science,Physics - Computational Physics}, + file = {/Users/wasmer/Zotero/storage/DG95U3QT/Lee et al. 
- 2023 - Predicting Density of States via Multi-modal Trans.pdf;/Users/wasmer/Zotero/storage/JTRZEV35/2303.html} +} + +@article{leeTopologicalQuantumMaterials2019, + title = {Topological {{Quantum Materials}} for {{Realizing Majorana Quasiparticles}}}, + author = {Lee, Stephen R. and Sharma, Peter A. and Lima-Sharma, Ana L. and Pan, Wei and Nenoff, Tina M.}, + date = {2019-01-08}, + journaltitle = {Chemistry of Materials}, + shortjournal = {Chem. Mater.}, + volume = {31}, + number = {1}, + pages = {26--51}, + publisher = {{American Chemical Society}}, + issn = {0897-4756}, + doi = {10.1021/acs.chemmater.8b04383}, + url = {https://doi.org/10.1021/acs.chemmater.8b04383}, + urldate = {2023-08-24}, + abstract = {In the past decade, basic physics, chemistry, and materials science research on topological quantum materials—and their potential use to implement reliable quantum computers—has rapidly expanded to become a major endeavor. A pivotal goal of this research has been to realize materials hosting Majorana quasiparticles, thereby making topological quantum computing a technological reality. While this goal remains elusive, recent data-mining studies, performed using topological quantum chemistry methodologies, have identified thousands of potential topological materials—some, and perhaps many, with potential for hosting Majoranas. We write this Review for advanced materials researchers who are interested in joining this expanding search, but who are not currently specialists in topology. The first half of the Review addresses, in readily understood terms, three main areas associated with topological sciences: (1) a description of topological quantum materials and how they enable quantum computing; (2) an explanation of Majorana quasiparticles, the important topologically endowed properties, and how it arises quantum mechanically; and (3) a description of the basic classes of topological materials where Majoranas might be found. 
The second half of the Review details selected materials systems where intense research efforts are underway to demonstrate nontrivial topological phenomena in the search for Majoranas. Specific materials reviewed include the groups II–V semiconductors (Cd3As2), the layered chalcogenides (MX2, ZrTe5), and the rare-earth pyrochlore iridates (A2Ir2O7, A = Eu, Pr). In each case, we describe crystallographic structures, bulk phase diagrams, materials synthesis methods (bulk, thin film, and/or nanowire forms), methods used to characterize topological phenomena, and potential evidence for the existence of Majorana quasiparticles.}, + keywords = {/unread,educational,Majorana,MZM,quantum computing,review,todo-tagging,topological,topological insulator}, + file = {/Users/wasmer/Nextcloud/Zotero/Lee et al_2019_Topological Quantum Materials for Realizing Majorana Quasiparticles.pdf;/Users/wasmer/Zotero/storage/MTIT5MI6/acs.chemmater.html} +} + @article{lehtolaAssessmentInitialGuesses2019, title = {Assessment of {{Initial Guesses}} for {{Self-Consistent Field Calculations}}. {{Superposition}} of {{Atomic Potentials}}: {{Simple}} yet {{Efficient}}}, shorttitle = {Assessment of {{Initial Guesses}} for {{Self-Consistent Field Calculations}}. {{Superposition}} of {{Atomic Potentials}}}, @@ -6660,6 +8680,40 @@ file = {/Users/wasmer/Nextcloud/Zotero/Lejaeghere et al_2014_Error Estimates for Solid-State Density-Functional Theory Predictions.pdf;/Users/wasmer/Zotero/storage/92BC3LBZ/10408436.2013.html} } +@article{lejaeghereReproducibilityDensityFunctional2016, + title = {Reproducibility in Density Functional Theory Calculations of Solids}, + author = {Lejaeghere, Kurt and Bihlmayer, Gustav and Björkman, Torbjörn and Blaha, Peter and Blügel, Stefan and Blum, Volker and Caliste, Damien and Castelli, Ivano E. and Clark, Stewart J. 
and Dal Corso, Andrea and family=Gironcoli, given=Stefano, prefix=de, useprefix=true and Deutsch, Thierry and Dewhurst, John Kay and Di Marco, Igor and Draxl, Claudia and Dułak, Marcin and Eriksson, Olle and Flores-Livas, José A. and Garrity, Kevin F. and Genovese, Luigi and Giannozzi, Paolo and Giantomassi, Matteo and Goedecker, Stefan and Gonze, Xavier and Grånäs, Oscar and Gross, E. K. U. and Gulans, Andris and Gygi, François and Hamann, D. R. and Hasnip, Phil J. and Holzwarth, N. A. W. and Iuşan, Diana and Jochym, Dominik B. and Jollet, François and Jones, Daniel and Kresse, Georg and Koepernik, Klaus and Küçükbenli, Emine and Kvashnin, Yaroslav O. and Locht, Inka L. M. and Lubeck, Sven and Marsman, Martijn and Marzari, Nicola and Nitzsche, Ulrike and Nordström, Lars and Ozaki, Taisuke and Paulatto, Lorenzo and Pickard, Chris J. and Poelmans, Ward and Probert, Matt I. J. and Refson, Keith and Richter, Manuel and Rignanese, Gian-Marco and Saha, Santanu and Scheffler, Matthias and Schlipf, Martin and Schwarz, Karlheinz and Sharma, Sangeeta and Tavazza, Francesca and Thunström, Patrik and Tkatchenko, Alexandre and Torrent, Marc and Vanderbilt, David and family=Setten, given=Michiel J., prefix=van, useprefix=true and Van Speybroeck, Veronique and Wills, John M. and Yates, Jonathan R. and Zhang, Guo-Xu and Cottenier, Stefaan},
+  date = {2016-03-25},
+  journaltitle = {Science},
+  volume = {351},
+  number = {6280},
+  pages = {aad3000},
+  publisher = {{American Association for the Advancement of Science}},
+  doi = {10.1126/science.aad3000},
+  url = {https://www.science.org/doi/full/10.1126/science.aad3000},
+  urldate = {2023-09-21},
+  abstract = {The widespread popularity of density functional theory has given rise to an extensive range of dedicated codes for predicting molecular and crystalline properties. However, each code implements the formalism in a different way, raising questions about the reproducibility of such predictions. 
We report the results of a community-wide effort that compared 15 solid-state codes, using 40 different potentials or basis set types, to assess the quality of the Perdew-Burke-Ernzerhof equations of state for 71 elemental crystals. We conclude that predictions from recent codes and pseudopotentials agree very well, with pairwise differences that are comparable to those between different high-precision experiments. Older methods, however, have less precise agreement. Our benchmark provides a framework for users and developers to document the precision of new applications and methodological improvements.}, + keywords = {/unread,DFT,reproducibility}, + file = {/Users/wasmer/Zotero/storage/9ZLZBQ2M/Lejaeghere et al. - 2016 - Reproducibility in density functional theory calcu.pdf} +} + +@online{lessigAtmoRepStochasticModel2023, + title = {{{AtmoRep}}: {{A}} Stochastic Model of Atmosphere Dynamics Using Large Scale Representation Learning}, + shorttitle = {{{AtmoRep}}}, + author = {Lessig, Christian and Luise, Ilaria and Gong, Bing and Langguth, Michael and Stadtler, Scarlet and Schultz, Martin}, + date = {2023-09-07}, + eprint = {2308.13280}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2308.13280}, + url = {http://arxiv.org/abs/2308.13280}, + urldate = {2023-11-12}, + abstract = {The atmosphere affects humans in a multitude of ways, from loss of life due to adverse weather effects to long-term social and economic impacts on societies. Computer simulations of atmospheric dynamics are, therefore, of great importance for the well-being of our and future generations. Here, we propose AtmoRep, a novel, task-independent stochastic computer model of atmospheric dynamics that can provide skillful results for a wide range of applications. 
AtmoRep uses large-scale representation learning from artificial intelligence to determine a general description of the highly complex, stochastic dynamics of the atmosphere from the best available estimate of the system's historical trajectory as constrained by observations. This is enabled by a novel self-supervised learning objective and a unique ensemble that samples from the stochastic model with a variability informed by the one in the historical record. The task-independent nature of AtmoRep enables skillful results for a diverse set of applications without specifically training for them and we demonstrate this for nowcasting, temporal interpolation, model correction, and counterfactuals. We also show that AtmoRep can be improved with additional data, for example radar observations, and that it can be extended to tasks such as downscaling. Our work establishes that large-scale neural networks can provide skillful, task-independent models of atmospheric dynamics. With this, they provide a novel means to make the large record of atmospheric observations accessible for applications and for scientific inquiry, complementing existing simulations based on first principles.}, + pubstate = {preprint}, + keywords = {AI4Science,atmospheric physics,dynamical systems,foundation models,FZJ,generative models,JSC,masked token model,multiformer,multimodal input,pretrained models,representation learning,self-attention,SSL,stochastic modeling,transformer,weather forecasting}, + file = {/Users/wasmer/Nextcloud/Zotero/Lessig et al_2023_AtmoRep.pdf;/Users/wasmer/Zotero/storage/27DMQXFS/2308.html} +} + @unpublished{lewisLearningElectronDensities2021, title = {Learning Electron Densities in the Condensed-Phase}, author = {Lewis, Alan M. 
and Grisafi, Andrea and Ceriotti, Michele and Rossi, Mariana}, @@ -6726,6 +8780,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Liao_Smidt_2023_Equiformer.pdf;/Users/wasmer/Zotero/storage/33AF9I34/2206.html} } +@online{liaoEquiformerV2ImprovedEquivariant2023, + title = {{{EquiformerV2}}: {{Improved Equivariant Transformer}} for {{Scaling}} to {{Higher-Degree Representations}}}, + shorttitle = {{{EquiformerV2}}}, + author = {Liao, Yi-Lun and Wood, Brandon and Das, Abhishek and Smidt, Tess}, + date = {2023-06-21}, + eprint = {2306.12059}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2306.12059}, + url = {http://arxiv.org/abs/2306.12059}, + urldate = {2023-08-19}, + abstract = {Equivariant Transformers such as Equiformer have demonstrated the efficacy of applying Transformers to the domain of 3D atomistic systems. However, they are still limited to small degrees of equivariant representations due to their computational complexity. In this paper, we investigate whether these architectures can scale well to higher degrees. Starting from Equiformer, we first replace \$SO(3)\$ convolutions with eSCN convolutions to efficiently incorporate higher-degree tensors. Then, to better leverage the power of higher degrees, we propose three architectural improvements -- attention re-normalization, separable \$S\^2\$ activation and separable layer normalization. 
Putting this all together, we propose EquiformerV2, which outperforms previous state-of-the-art methods on the large-scale OC20 dataset by up to \$12\textbackslash\%\$ on forces, \$4\textbackslash\%\$ on energies, offers better speed-accuracy trade-offs, and \$2\textbackslash times\$ reduction in DFT calculations needed for computing adsorption energies.}, + pubstate = {preprint}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Liao et al_2023_EquiformerV2.pdf;/Users/wasmer/Zotero/storage/MHXITTSP/2306.html} +} + @article{liCriticalExaminationRobustness2023, title = {A Critical Examination of Robustness and Generalizability of Machine Learning Prediction of Materials Properties}, author = {Li, Kangming and DeCost, Brian and Choudhary, Kamal and Greenwood, Michael and Hattrick-Simpers, Jason}, @@ -6822,6 +8893,24 @@ file = {/Users/wasmer/Zotero/storage/9B88LYEZ/S0743731520303464.html} } +@article{liechtensteinLocalSpinDensity1987, + title = {Local Spin Density Functional Approach to the Theory of Exchange Interactions in Ferromagnetic Metals and Alloys}, + author = {Liechtenstein, A. I. and Katsnelson, M. I. and Antropov, V. P. and Gubanov, V. A.}, + date = {1987-05-01}, + journaltitle = {Journal of Magnetism and Magnetic Materials}, + shortjournal = {Journal of Magnetism and Magnetic Materials}, + volume = {67}, + number = {1}, + pages = {65--74}, + issn = {0304-8853}, + doi = {10.1016/0304-8853(87)90721-9}, + url = {https://www.sciencedirect.com/science/article/pii/0304885387907219}, + urldate = {2023-09-20}, + abstract = {Rigorous expressions for the exchange parameters of classical Heisenberg model applied to crystals are obtained using a local spin density functional (LSDF) approach and KKR-Green functions formalism. The spin wave stiffness constant and Curie temperature (Tc) of ferromagnetic metals are obtained without any model assumptions as to the character of exchange interactions. 
The concentration dependence of Tc for binary ferromagnetic alloys is investigated in the framework of the single-site CPA-theory. The corresponding calculations are carried out for simple metals Fe, Ni and disordered Ni–Pd alloys.},
+  keywords = {alloys,calculation of Jij,CPA,Curie temperature,DFT,DFT theory,disordered,Ferromagnetism,infinitesimal rotation,Jij,KKR,KKR foundations,magnetism,original publication,physics},
+  file = {/Users/wasmer/Zotero/storage/F46FQTHF/Liechtenstein et al. - 1987 - Local spin density functional approach to the theo.pdf;/Users/wasmer/Zotero/storage/23L5VB4T/0304885387907219.html}
+}
+
 @article{liKohnShamEquationsRegularizer2021,
   title = {Kohn-{{Sham Equations}} as {{Regularizer}}: {{Building Prior Knowledge}} into {{Machine-Learned Physics}}},
   shorttitle = {Kohn-{{Sham Equations}} as {{Regularizer}}},
@@ -6890,6 +8979,58 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Lindmaa_2017_Theoretical prediction of properties of atomistic systems.pdf;/Users/wasmer/Zotero/storage/TVX96NQ7/record.html}
 }
 
+@online{linEfficientApproximationsComplete2023,
+  title = {Efficient {{Approximations}} of {{Complete Interatomic Potentials}} for {{Crystal Property Prediction}}},
+  author = {Lin, Yuchao and Yan, Keqiang and Luo, Youzhi and Liu, Yi and Qian, Xiaoning and Ji, Shuiwang},
+  date = {2023-08-01},
+  eprint = {2306.10045},
+  eprinttype = {arxiv},
+  eprintclass = {physics},
+  doi = {10.48550/arXiv.2306.10045},
+  url = {http://arxiv.org/abs/2306.10045},
+  urldate = {2023-08-19},
+  abstract = {We study property prediction for crystal materials. A crystal structure consists of a minimal unit cell that is repeated infinitely in 3D space. How to accurately represent such repetitive structures in machine learning models remains unresolved. Current methods construct graphs by establishing edges only between nearby nodes, thereby failing to faithfully capture infinite repeating patterns and distant interatomic interactions. 
In this work, we propose several innovations to overcome these limitations. First, we propose to model physics-principled interatomic potentials directly instead of only using distances as in many existing methods. These potentials include the Coulomb potential, London dispersion potential, and Pauli repulsion potential. Second, we model the complete set of potentials among all atoms, instead of only between nearby atoms as in existing methods. This is enabled by our approximations of infinite potential summations with provable error bounds. We further develop efficient algorithms to compute the approximations. Finally, we propose to incorporate our computations of complete interatomic potentials into message passing neural networks for representation learning. We perform experiments on the JARVIS and Materials Project benchmarks for evaluation. Results show that the use of interatomic potentials and complete interatomic potentials leads to consistent performance improvements with reasonable computational costs. 
Our code is publicly available as part of the AIRS library (https://github.com/divelab/AIRS/tree/main/OpenMat/PotNet).}, + pubstate = {preprint}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lin et al_2023_Efficient Approximations of Complete Interatomic Potentials for Crystal.pdf;/Users/wasmer/Zotero/storage/E3N59FIA/2306.html} +} + +@article{lingHighDimensionalMaterialsProcess2017, + title = {High-{{Dimensional Materials}} and {{Process Optimization Using Data-Driven Experimental Design}} with {{Well-Calibrated Uncertainty Estimates}}}, + author = {Ling, Julia and Hutchinson, Maxwell and Antono, Erin and Paradiso, Sean and Meredig, Bryce}, + date = {2017-09-01}, + journaltitle = {Integrating Materials and Manufacturing Innovation}, + shortjournal = {Integr Mater Manuf Innov}, + volume = {6}, + number = {3}, + pages = {207--217}, + issn = {2193-9772}, + doi = {10.1007/s40192-017-0098-z}, + url = {https://doi.org/10.1007/s40192-017-0098-z}, + urldate = {2023-08-19}, + abstract = {The optimization of composition and processing to obtain materials that exhibit desirable characteristics has historically relied on a combination of domain knowledge, trial and error, and luck. We propose a methodology that can accelerate this process by fitting data-driven models to experimental data as it is collected to suggest which experiment should be performed next. This methodology can guide the practitioner to test the most promising candidates earlier and can supplement scientific and engineering intuition with data-driven insights. A key strength of the proposed framework is that it scales to high-dimensional parameter spaces, as are typical in materials discovery applications. Importantly, the data-driven models incorporate uncertainty analysis, so that new experiments are proposed based on a combination of exploring high-uncertainty candidates and exploiting high-performing regions of parameter space. 
Over four materials science test cases, our methodology led to the optimal candidate being found with three times fewer required measurements than random guessing on average.}, + langid = {english}, + keywords = {/unread,Active learning,Experimental design,Machine learning,Sequential design,Uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Ling et al_2017_High-Dimensional Materials and Process Optimization Using Data-Driven.pdf} +} + +@article{linNumericalMethodsKohn2019, + title = {Numerical Methods for {{Kohn}}–{{Sham}} Density Functional Theory}, + author = {Lin, Lin and Lu, Jianfeng and Ying, Lexing}, + date = {2019-06-14}, + journaltitle = {Acta Numerica}, + volume = {28}, + pages = {405--539}, + publisher = {{Cambridge University Press}}, + issn = {0962-4929, 1474-0508}, + doi = {10.1017/S0962492919000047}, + url = {https://www.cambridge.org/core/journals/acta-numerica/article/numerical-methods-for-kohnsham-density-functional-theory/755DFB88349DD5F1EE1E360AD61661BF}, + urldate = {2023-09-24}, + abstract = {Kohn–Sham density functional theory (DFT) is the most widely used electronic structure theory. Despite significant progress in the past few decades, the numerical solution of Kohn–Sham DFT problems remains challenging, especially for large-scale systems. In this paper we review the basics as well as state-of-the-art numerical methods, and focus on the unique numerical challenges of DFT.}, + langid = {english}, + file = {/Users/wasmer/Zotero/storage/9Z7XGB5F/Lin et al. - 2019 - Numerical methods for Kohn–Sham density functional.pdf} +} + @online{liptonTroublingTrendsMachine2018, title = {Troubling {{Trends}} in {{Machine Learning Scholarship}}}, author = {Lipton, Zachary C. and Steinhardt, Jacob}, @@ -6902,10 +9043,26 @@ urldate = {2022-06-27}, abstract = {Collectively, machine learning (ML) researchers are engaged in the creation and dissemination of knowledge about data-driven algorithms. 
In a given paper, researchers might aspire to any subset of the following goals, among others: to theoretically characterize what is learnable, to obtain understanding through empirically rigorous experiments, or to build a working system that has high predictive accuracy. While determining which knowledge warrants inquiry may be subjective, once the topic is fixed, papers are most valuable to the community when they act in service of the reader, creating foundational knowledge and communicating as clearly as possible. Recent progress in machine learning comes despite frequent departures from these ideals. In this paper, we focus on the following four patterns that appear to us to be trending in ML scholarship: (i) failure to distinguish between explanation and speculation; (ii) failure to identify the sources of empirical gains, e.g., emphasizing unnecessary modifications to neural architectures when gains actually stem from hyper-parameter tuning; (iii) mathiness: the use of mathematics that obfuscates or impresses rather than clarifies, e.g., by confusing technical and non-technical concepts; and (iv) misuse of language, e.g., by choosing terms of art with colloquial connotations or by overloading established technical terms. While the causes behind these patterns are uncertain, possibilities include the rapid expansion of the community, the consequent thinness of the reviewer pool, and the often-misaligned incentives between scholarship and short-term measures of success (e.g., bibliometrics, attention, and entrepreneurial opportunity). 
While each pattern offers a corresponding remedy (don't do it), we also discuss some speculative suggestions for how the community might combat these trends.},
  pubstate = {preprint},
- keywords = {criticism,ML,research ethics,state of a field},
+ keywords = {criticism,ML,research ethics,skepticism,state of a field},
  file = {/Users/wasmer/Nextcloud/Zotero/Lipton_Steinhardt_2018_Troubling Trends in Machine Learning Scholarship.pdf;/Users/wasmer/Zotero/storage/HK89ZR8C/1807.html}
}

+@online{liSelfconsistentGradientlikeEigen2022,
+  title = {Self-consistent {{Gradient-like Eigen Decomposition}} in {{Solving Schr{\"o}dinger Equations}}},
+  author = {Li, Xihan and Chen, Xiang and Tutunov, Rasul and Bou-Ammar, Haitham and Wang, Lei and Wang, Jun},
+  date = {2022-02-02},
+  eprint = {2202.01388},
+  eprinttype = {arxiv},
+  eprintclass = {quant-ph},
+  doi = {10.48550/arXiv.2202.01388},
+  url = {http://arxiv.org/abs/2202.01388},
+  urldate = {2023-09-23},
+  abstract = {The Schr{\"o}dinger equation is at the heart of modern quantum mechanics. Since exact solutions of the ground state are typically intractable, standard approaches approximate Schr{\"o}dinger equation as forms of nonlinear generalized eigenvalue problems \$F(V)V = SV\textbackslash Lambda\$ in which \$F(V)\$, the matrix to be decomposed, is a function of its own top-\$k\$ smallest eigenvectors \$V\$, leading to a "self-consistency problem". Traditional iterative methods heavily rely on high-quality initial guesses of \$V\$ generated via domain-specific heuristics methods based on quantum mechanics. 
In this work, we eliminate such a need for domain-specific heuristics by presenting a novel framework, Self-consistent Gradient-like Eigen Decomposition (SCGLED) that regards \$F(V)\$ as a special "online data generator", thus allows gradient-like eigendecomposition methods in streaming \$k\$-PCA to approach the self-consistency of the equation from scratch in an iterative way similar to online learning. With several critical numerical improvements, SCGLED is robust to initial guesses, free of quantum-mechanism-based heuristics designs, and neat in implementation. Our experiments show that it not only can simply replace traditional heuristics-based initial guess methods with large performance advantage (achieved averagely 25x more precise than the best baseline in similar wall time), but also is capable of finding highly precise solutions independently without any traditional iterative methods.}, + pubstate = {preprint}, + keywords = {/unread,alternative approaches,eigendecomposition,generalized eigenvalue problem,incremental algorithms,iterative algorithms,numerical linear algebra,online learning,optimization,SCF,Schrödinger equation,streaming algorithm,streaming algorithms}, + file = {/Users/wasmer/Zotero/storage/DHLPNXT3/Li et al. 
- 2022 - Self-consistent Gradient-like Eigen Decomposition .pdf;/Users/wasmer/Zotero/storage/TAQH9FC5/2202.html} +} + @article{liuDensityEstimationUsing2021, title = {Density Estimation Using Deep Generative Neural Networks}, author = {Liu, Qiao and Xu, Jiaze and Jiang, Rui and Wong, Wing Hung}, @@ -6922,6 +9079,61 @@ file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2021_Density estimation using deep generative neural networks.pdf} } +@article{liuDIGTurnkeyLibrary2021, + title = {{{DIG}}: {{A Turnkey Library}} for {{Diving}} into {{Graph Deep Learning Research}}}, + shorttitle = {{{DIG}}}, + author = {Liu, Meng and Luo, Youzhi and Wang, Limei and Xie, Yaochen and Yuan, Hao and Gui, Shurui and Yu, Haiyang and Xu, Zhao and Zhang, Jingtun and Liu, Yi and Yan, Keqiang and Liu, Haoran and Fu, Cong and Oztekin, Bora M. and Zhang, Xuan and Ji, Shuiwang}, + date = {2021}, + journaltitle = {Journal of Machine Learning Research}, + volume = {22}, + number = {240}, + pages = {1--9}, + issn = {1533-7928}, + url = {http://jmlr.org/papers/v22/21-0343.html}, + urldate = {2023-10-13}, + abstract = {Although there exist several libraries for deep learning on graphs, they are aiming at implementing basic operations for graph deep learning. In the research community, implementing and benchmarking various advanced tasks are still painful and time-consuming with existing libraries. To facilitate graph deep learning research, we introduce DIG: Dive into Graphs, a turnkey library that provides a unified testbed for higher level, research-oriented graph deep learning tasks. Currently, we consider graph generation, self-supervised learning on graphs, explainability of graph neural networks, and deep learning on 3D graphs. For each direction, we provide unified implementations of data interfaces, common algorithms, and evaluation metrics. 
Altogether, DIG is an extensible, open-source, and turnkey library for researchers to develop new methods and effortlessly compare with common baselines using widely used datasets and evaluation metrics. Source code is available at https://github.com/divelab/DIG.}, + keywords = {\_tablet,alternative approaches,AML,DimeNet,General ML,geometric deep learning,GNN,GNNExplainer,graph generation,graph ML,library,LIFT,MD17,ML,MPNN,PyG,PyTorch,QM9,SchNet,SphereNet,SSL,with-code,XAI}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2021_DIG.pdf;/Users/wasmer/Zotero/storage/P9LJWCS3/DIG.html} +} + +@article{liuFullpotentialApproachRelativistic2016, + title = {A Full-Potential Approach to the Relativistic Single-Site {{Green}}’s Function}, + author = {Liu, Xianglin and Wang, Yang and Eisenbach, Markus and Stocks, G. Malcolm}, + date = {2016-07}, + journaltitle = {Journal of Physics: Condensed Matter}, + shortjournal = {J. Phys.: Condens. Matter}, + volume = {28}, + number = {35}, + pages = {355501}, + publisher = {{IOP Publishing}}, + issn = {0953-8984}, + doi = {10.1088/0953-8984/28/35/355501}, + url = {https://dx.doi.org/10.1088/0953-8984/28/35/355501}, + urldate = {2023-09-19}, + abstract = {One major purpose of studying the single-site scattering problem is to obtain the scattering matrices and differential equation solutions indispensable to multiple scattering theory (MST) calculations. On the other hand, the single-site scattering itself is also appealing because it reveals the physical environment experienced by electrons around the scattering center. In this paper we demonstrate a new formalism to calculate the relativistic full-potential single-site Green’s function. We implement this method to calculate the single-site density of states and electron charge densities. 
The code is rigorously tested and with the help of Krein’s theorem, the relativistic effects and full potential effects in group V elements and noble metals are thoroughly investigated.}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2016_A full-potential approach to the relativistic single-site Green’s function.pdf} +} + +@article{liuFullyrelativisticFullpotentialMultiple2018, + title = {Fully-Relativistic Full-Potential Multiple Scattering Theory: {{A}} Pathology-Free Scheme}, + shorttitle = {Fully-Relativistic Full-Potential Multiple Scattering Theory}, + author = {Liu, Xianglin and Wang, Yang and Eisenbach, Markus and Stocks, G. Malcolm}, + date = {2018-03-01}, + journaltitle = {Computer Physics Communications}, + shortjournal = {Computer Physics Communications}, + volume = {224}, + pages = {265--272}, + issn = {0010-4655}, + doi = {10.1016/j.cpc.2017.10.011}, + url = {https://www.sciencedirect.com/science/article/pii/S001046551730351X}, + urldate = {2023-09-19}, + abstract = {The Green function plays an essential role in the Korringa–Kohn–Rostoker(KKR) multiple scattering method. In practice, it is constructed from the regular and irregular solutions of the local Kohn–Sham equation and robust methods exist for spherical potentials. However, when applied to a non-spherical potential, numerical errors from the irregular solutions give rise to pathological behaviors of the charge density at small radius. Here we present a full-potential implementation of the fully-relativistic KKR method to perform ab initio self-consistent calculation by directly solving the Dirac differential equations using the generalized variable phase (sine and cosine matrices) formalism Liu et~al. (2016). The pathology around the origin is completely eliminated by carrying out the energy integration of the single-site Green function along the real axis. 
By using an efficient pole-searching technique to identify the zeros of the well-behaved Jost matrices, we demonstrated that this scheme is numerically stable and computationally efficient, with speed comparable to the conventional contour energy integration method, while free of the pathology problem of the charge density. As an application, this method is utilized to investigate the crystal structures of polonium and their bulk properties, which is challenging for a conventional real-energy scheme. The noble metals are also calculated, both as a test of our method and to study the relativistic effects.}, + keywords = {/unread,DFT,Dirac equation,full-potential,full-relativistic,KKR,Multiple scattering theory}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2018_Fully-relativistic full-potential multiple scattering theory.pdf} +} + @article{liuImprovingPerformanceLongRangeCorrected2017, title = {Improving the {{Performance}} of {{Long-Range-Corrected Exchange-Correlation Functional}} with an {{Embedded Neural Network}}}, author = {Liu, Qin and Wang, JingChun and Du, PengLi and Hu, LiHong and Zheng, Xiao and Chen, GuanHua}, @@ -6955,10 +9167,61 @@ file = {/Users/wasmer/Zotero/storage/JX4G37PC/5368343.html} } -@article{liuMagneticTopologicalInsulator, - title = {Magnetic {{Topological Insulator Heterostructures}}: {{A Review}}}, - shorttitle = {Magnetic {{Topological Insulator Heterostructures}}}, - author = {Liu, Jieyi and Hesjedal, Thorsten}, +@online{liuMachineLearningEffective2019, + title = {Machine {{Learning}} the {{Effective Hamiltonian}} in {{High Entropy Alloys}}}, + author = {Liu, Xianglin and Zhang, Jiaxin and Eisenbach, Markus and Wang, Yang}, + date = {2019-12-31}, + eprint = {1912.13460}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.1912.13460}, + url = {http://arxiv.org/abs/1912.13460}, + urldate = {2023-09-19}, + abstract = {The development of machine learning sheds new light on the problem of 
statistical thermodynamics in multicomponent alloys. However, a data-driven approach to construct the effective Hamiltonian requires sufficiently large data sets, which is expensive to calculate with conventional density functional theory (DFT). To solve this problem, we propose to use the atomic local energy as the target variable, and harness the power of the linear-scaling DFT to accelerate the data generating process. Using the large amounts of DFT data sets, various complex models are devised and applied to learn the effective Hamiltonians of a range of refractory high entropy alloys (HEAs). The testing \$R\^2\$ scores of the effective pair interaction model are higher than 0.99, demonstrating that the pair interactions within the 6-th coordination shell provide an excellent description of the atomic local energies for all the four HEAs. This model is further improved by including nonlinear and multi-site interactions. In particular, the deep neural networks (DNNs) built directly in the local configuration space (therefore no hand-crafted features) are employed to model the effective Hamiltonian. 
The results demonstrate that neural networks are promising for the modeling of effective Hamiltonian due to its excellent representation power.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2019_Machine Learning the Effective Hamiltonian in High Entropy Alloys.pdf;/Users/wasmer/Zotero/storage/GU92DRKM/1912.html} +} + +@article{liuMachineLearningHighentropy2023, + title = {Machine Learning for High-Entropy Alloys: {{Progress}}, Challenges and Opportunities}, + shorttitle = {Machine Learning for High-Entropy Alloys}, + author = {Liu, Xianglin and Zhang, Jiaxin and Pei, Zongrui}, + date = {2023-01-01}, + journaltitle = {Progress in Materials Science}, + shortjournal = {Progress in Materials Science}, + volume = {131}, + pages = {101018}, + issn = {0079-6425}, + doi = {10.1016/j.pmatsci.2022.101018}, + url = {https://www.sciencedirect.com/science/article/pii/S0079642522000998}, + urldate = {2023-10-08}, + abstract = {High-entropy alloys (HEAs) have attracted extensive interest due to their exceptional mechanical properties and the vast compositional space for new HEAs. However, understanding their novel physical mechanisms and then using these mechanisms to design new HEAs are confronted with their high-dimensional chemical complexity, which presents unique challenges to (i) the theoretical modeling that needs accurate atomic interactions for atomistic simulations and (ii) constructing reliable macro-scale models for high-throughput screening of vast amounts of candidate alloys. Machine learning (ML) sheds light on these problems with its capability to represent extremely complex relations. This review highlights the success and promising future of utilizing ML to overcome these challenges. We first introduce the basics of ML algorithms and application scenarios. 
We then summarize the state-of-the-art ML models describing atomic interactions and atomistic simulations of thermodynamic and mechanical properties. Special attention is paid to phase predictions, planar-defect calculations, and plastic deformation simulations. Next, we review ML models for macro-scale properties, such as lattice structures, phase formations, and mechanical properties. Examples of machine-learned phase-formation rules and order parameters are used to illustrate the workflow. Finally, we discuss the remaining challenges and present an outlook of research directions, including uncertainty quantification and ML-guided inverse materials design.}, + keywords = {/unread,alloys,AML,HEA,materials,ML,review,review-of-AML,transition metals}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2023_Machine learning for high-entropy alloys.pdf;/Users/wasmer/Zotero/storage/BTPAY2N6/S0079642522000998.html} +} + +@online{liuMachineLearningModeling2019, + title = {Machine Learning Modeling of High Entropy Alloy: The Role of Short-Range Order}, + shorttitle = {Machine Learning Modeling of High Entropy Alloy}, + author = {Liu, Xianglin and Zhang, Jiaxin and Eisenbach, Markus and Wang, Yang}, + date = {2019-06-07}, + eprint = {1906.02889}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.1906.02889}, + url = {http://arxiv.org/abs/1906.02889}, + urldate = {2023-09-19}, + abstract = {The development of machine learning sheds new light on the traditionally complicated problem of thermodynamics in multicomponent alloys. Successful application of such a method, however, strongly depends on the quality of the data and model. Here we propose a scheme to improve the representativeness of the data by utilizing the short-range order (SRO) parameters to survey the configuration space. Using the improved data, a pair interaction model is trained for the NbMoTaW high entropy alloy using linear regression. 
Benefiting from the physics incorporated into the model, the learned effective Hamiltonian demonstrates excellent predictability over the whole configuration space. By including pair interactions within the 6th nearest-neighbor shell, this model achieves an \$R\^2\$ testing score of 0.997 and root mean square error of 0.43 meV. We further perform a detailed analysis on the effects of training data, testing data, and model parameters. The results reveal the vital importance of representative data and physical model. On the other hand, we also examined the performance neural networks, which is found to demonstrate a strong tendency to overfit the data.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Liu et al_2019_Machine learning modeling of high entropy alloy.pdf;/Users/wasmer/Zotero/storage/RIJJA86L/1906.html} +} + +@article{liuMagneticTopologicalInsulator, + title = {Magnetic {{Topological Insulator Heterostructures}}: {{A Review}}}, + shorttitle = {Magnetic {{Topological Insulator Heterostructures}}}, + author = {Liu, Jieyi and Hesjedal, Thorsten}, journaltitle = {Advanced Materials}, volume = {n/a}, number = {n/a}, @@ -7127,10 +9390,30 @@ isbn = {978-3-89336-501-2}, langid = {english}, pagetotal = {189}, - keywords = {Dissertation (Univ.),Hochschulschrift,juKKR,KKR,magnetism,PGI-1/IAS-1,thesis}, + keywords = {\_tablet,Dissertation (Univ.),Hochschulschrift,juKKR,KKR,magnetism,PGI-1/IAS-1,thesis}, file = {/Users/wasmer/Nextcloud/Zotero/Lounis_2007_Theory of Magnetic Transition Metal Nanoclusters on Surfaces.pdf} } +@article{luDeepXDEDeepLearning2021, + title = {{{DeepXDE}}: {{A Deep Learning Library}} for {{Solving Differential Equations}}}, + shorttitle = {{{DeepXDE}}}, + author = {Lu, Lu and Meng, Xuhui and Mao, Zhiping and Karniadakis, George Em}, + date = {2021-01}, + journaltitle = {SIAM Review}, + shortjournal = {SIAM Rev.}, + volume = {63}, + number = {1}, + pages = {208--228}, + publisher = 
{{Society for Industrial and Applied Mathematics}},
+  issn = {0036-1445},
+  doi = {10.1137/19M1274067},
+  url = {https://epubs.siam.org/doi/10.1137/19M1274067},
+  urldate = {2023-11-11},
+  abstract = {Deep learning has achieved remarkable success in diverse applications; however, its use in solving partial differential equations (PDEs) has emerged only recently. Here, we present an overview of physics-informed neural networks (PINNs), which embed a PDE into the loss of the neural network using automatic differentiation. The PINN algorithm is simple, and it can be applied to different types of PDEs, including integro-differential equations, fractional PDEs, and stochastic PDEs. Moreover, from the implementation point of view, PINNs solve inverse problems as easily as forward problems. We propose a new residual-based adaptive refinement (RAR) method to improve the training efficiency of PINNs. For pedagogical reasons, we compare the PINN algorithm to a standard finite element method. We also present a Python library for PINNs, DeepXDE, which is designed to serve both as an educational tool to be used in the classroom as well as a research tool for solving problems in computational science and engineering. DeepXDE supports complex-geometry domains based on the technique of constructive solid geometry and enables the user code to be compact, resembling closely the mathematical formulation. We introduce the usage of DeepXDE and its customizability, and we demonstrate the capability of PINNs and the user-friendliness of DeepXDE for five different examples. More broadly, DeepXDE contributes to the more rapid development of the emerging scientific machine learning field.},
+  keywords = {/unread,library,neural operator,PDE,physics-informed ML,PINN,PyTorch,with-code},
+  file = {/Users/wasmer/Nextcloud/Zotero/Lu et al_2021_DeepXDE.pdf}
+}
+
@article{luhBOUNDSTATESUPERCONDUCTORS1965,
  title = {BOUND STATE IN SUPERCONDUCTORS WITH PARAMAGNETIC IMPURITIES},
  author = {Luh, Yu},
@@ -7189,6 +9472,70 @@
  file = {/Users/wasmer/Nextcloud/Zotero/Lunghi_Sanvito_2019_Surfing multiple conformation-property landscapes via machine learning.pdf;/Users/wasmer/Zotero/storage/FQSQYUBP/1911.html}
}

+@inproceedings{lupopasiniFastAccuratePredictions2022,
+  title = {Fast and {{Accurate Predictions}} of {{Total Energy}} for {{Solid Solution Alloys}} with {{Graph Convolutional Neural Networks}}},
+  booktitle = {Driving {{Scientific}} and {{Engineering Discoveries Through}} the {{Integration}} of {{Experiment}}, {{Big Data}}, and {{Modeling}} and {{Simulation}}},
+  author = {Lupo Pasini, Massimiliano and Burčul, Marko and Reeve, Samuel Temple and Eisenbach, Markus and Perotto, Simona},
+  editor = {Nichols, Jeffrey and Maccabe, Arthur ‘Barney’ and Nutaro, James and Pophale, Swaroop and Devineni, Pravallika and Ahearn, Theresa and Verastegui, Becky},
+  date = {2022},
+  series = {Communications in {{Computer}} and {{Information Science}}},
+  pages = {79--98},
+  publisher = {{Springer International Publishing}},
+  location = {{Cham}},
+  doi = {10.1007/978-3-030-96498-6_5},
+  abstract = {We use graph convolutional neural networks (GCNNs) to produce fast and accurate predictions of the total energy of solid solution binary alloys. GCNNs allow us to abstract the lattice structure of a solid material as a graph, whereby atoms are modeled as nodes and metallic bonds as edges. 
This representation naturally incorporates information about the structure of the material, thereby eliminating the need for computationally expensive data pre-processing which would be required with standard neural network (NN) approaches. We train GCNNs on ab-initio density functional theory (DFT) for copper-gold (CuAu) and iron-platinum (FePt) data that has been generated by running the LSMS-3 code, which implements a locally self-consistent multiple scattering method, on OLCF supercomputers Titan and Summit. GCNN outperforms the ab-initio DFT simulation by orders of magnitude in terms of computational time to produce the estimate of the total energy for a given atomic configuration of the lattice structure. We compare the predictive performance of GCNN models against a standard NN such as dense feedforward multi-layer perceptron (MLP) by using the root-mean-squared errors to quantify the predictive quality of the deep learning (DL) models. We find that the attainable accuracy of GCNNs is at least an order of magnitude better than that of the MLP.}, + isbn = {978-3-030-96498-6}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lupo Pasini et al_2022_Fast and Accurate Predictions of Total Energy for Solid Solution Alloys with.pdf} +} + +@article{lupopasiniGraphNeuralNetworks2023, + title = {Graph Neural Networks Predict Energetic and Mechanical Properties for Models of Solid Solution Metal Alloy Phases}, + author = {Lupo Pasini, Massimiliano and Jung, Gang Seob and Irle, Stephan}, + date = {2023-05-01}, + journaltitle = {Computational Materials Science}, + shortjournal = {Computational Materials Science}, + volume = {224}, + pages = {112141}, + issn = {0927-0256}, + doi = {10.1016/j.commatsci.2023.112141}, + url = {https://www.sciencedirect.com/science/article/pii/S0927025623001350}, + urldate = {2023-09-19}, + abstract = {We developed a PyTorch-based architecture called HydraGNN that implements graph 
convolutional neural networks (GCNNs) to predict the formation energy and the bulk modulus for models of solid solution alloys for various atomic crystal structures and relaxed volumes. We trained the GCNN surrogate model on a dataset for nickel–niobium (NiNb) generated by the embedded atom model (EAM) empirical interatomic potential for demonstration purposes. The dataset was generated by calculating the formation energy and the bulk modulus as a prototypical elastic property for optimized geometries starting from initial body-centered cubic (BCC), face-centered cubic (FCC), and hexagonal compact packed (HCP) crystal structures, with configurations spanning the possible compositional range for each of the three types of initial crystal structures. Numerical results show that the GCNN model effectively predicts both the formation energy and the bulk modulus as function of the optimized crystal structure, relaxed volume, and configurational entropy of the model structures for solid solution alloys.},
+  keywords = {AML,GNN,HydraGNN,ML,multi-task learning,prediction of energy,prediction of mechanical properties},
+  file = {/Users/wasmer/Zotero/storage/UCX4RJQN/S0927025623001350.html}
+}
+
+@software{lupopasiniHydraGNN2021,
+  title = {{{HydraGNN}}},
+  author = {Lupo Pasini, Massimiliano and Reeve, Samuel Temple and Zhang, Pei and Choi, Jong Youl},
+  date = {2021-10-19},
+  doi = {10.11578/dc.20211019.2},
+  url = {https://www.osti.gov/biblio/1826659},
+  urldate = {2023-09-19},
+  abstract = {Distributed PyTorch implementation of multi-headed graph convolutional neural networks},
+  organization = {{Oak Ridge National Laboratory (ORNL), Oak Ridge, TN (United States)}},
+  keywords = {/unread,AML,GNN,HydraGNN,library,ML,multi-task learning,PyTorch}
+}
+
+@article{lupopasiniScalableAlgorithmOptimization2021,
+  title = {A scalable algorithm for the optimization of neural network 
Architectures}, + author = {Lupo Pasini, Massimiliano and Yin, Junqi and Li, Ying Wai and Eisenbach, Markus}, + date = {2021-07-01}, + journaltitle = {Parallel Computing}, + shortjournal = {Parallel Computing}, + volume = {104--105}, + pages = {102788}, + issn = {0167-8191}, + doi = {10.1016/j.parco.2021.102788}, + url = {https://www.sciencedirect.com/science/article/pii/S0167819121000430}, + urldate = {2023-09-19}, + abstract = {We propose a new scalable method to optimize the architecture of an artificial neural network. The proposed algorithm, called Greedy Search for Neural Network Architecture, aims to determine a neural network with minimal number of layers that is at least as performant as neural networks of the same structure identified by other hyperparameter search algorithms in terms of accuracy and computational cost. Numerical results performed on benchmark datasets show that, for these datasets, our method outperforms state-of-the-art hyperparameter optimization algorithms in terms of attainable predictive performance by the selected neural network architecture, and time-to-solution for the hyperparameter optimization to complete.}, + keywords = {/unread,General ML,hyperparameters optimization,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Lupo Pasini et al_2021_A scalable algorithm for the optimization of neural network architectures.pdf} +} + @online{lysogorskiyActiveLearningStrategies2022, title = {Active Learning Strategies for Atomic Cluster Expansion Models}, author = {Lysogorskiy, Yury and Bochkarev, Anton and Mrovec, Matous and Drautz, Ralf}, @@ -7254,7 +9601,7 @@ abstract = {Self-driving labs (SDLs) leverage combinations of artificial intelligence, automation, and advanced computing to accelerate scientific discovery. 
The promise of this field has given rise to a rich community of passionate scientists, engineers, and social scientists, as evidenced by the development of the Acceleration Consortium and recent Accelerate Conference. Despite its strengths, this rapidly developing field presents numerous opportunities for growth, challenges to overcome, and potential risks of which to remain aware. This community perspective builds on a discourse instantiated during the first Accelerate Conference, and looks to the future of self-driving labs with a tempered optimism. Incorporating input from academia, government, and industry, we briefly describe the current status of self-driving labs, then turn our attention to barriers, opportunities, and a vision for what is possible. Our field is delivering solutions in technology and infrastructure, artificial intelligence and knowledge generation, and education and workforce development. In the spirit of community, we intend for this work to foster discussion and drive best practices as our field grows.}, langid = {english}, organization = {{arXiv.org}}, - keywords = {/unread}, + keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Maffettone et al_2023_What is missing in autonomous discovery.pdf} } @@ -7281,7 +9628,7 @@ url = {https://link.springer.com/book/10.1007/978-3-642-84411-9}, urldate = {2022-06-18}, langid = {english}, - keywords = {condensed matter,defects,DFT,magnetism}, + keywords = {\_tablet,condensed matter,defects,DFT,magnetism}, file = {/Users/wasmer/Nextcloud/Zotero/Magnetism and the Electronic Structure of Crystals.pdf;/Users/wasmer/Zotero/storage/QVJRNHRA/978-3-642-84411-9.html} } @@ -7312,10 +9659,52 @@ urldate = {2023-06-12}, abstract = {Topological superconductors (TSCs) have garnered significant research and industry attention in the past two decades. 
By hosting Majorana bound states which can be used as qubits that are robust against local perturbations, TSCs offer a promising platform toward (non-universal) topological quantum computation. However, there has been a scarcity of TSC candidates, and the experimental signatures that identify a TSC are often elusive. In this perspective, after a short review of the TSC basics and theories, we provide an overview of the TSC materials candidates, including natural compounds and synthetic material systems. We further introduce various experimental techniques to probe TSC, focusing on how a system is identified as a TSC candidate, and why a conclusive answer is often challenging to draw. We conclude by calling for new experimental signatures and stronger computational support to accelerate the search for new TSC candidates.}, pubstate = {preprint}, - keywords = {/unread,materials,perspective,physics,superconductor,Topological Superconductor}, + keywords = {materials,perspective,physics,superconductor,Topological Superconductor}, file = {/Users/wasmer/Zotero/storage/4J3A87J3/Mandal et al. - 2023 - Topological superconductors from a materials persp.pdf;/Users/wasmer/Zotero/storage/L8FFWWRW/2303.html} } +@article{manicaAcceleratingMaterialDesign2023, + title = {Accelerating Material Design with the Generative Toolkit for Scientific Discovery}, + author = {Manica, Matteo and Born, Jannis and Cadow, Joris and Christofidellis, Dimitrios and Dave, Ashish and Clarke, Dean and Teukam, Yves Gaetan Nana and Giannone, Giorgio and Hoffman, Samuel C. 
and Buchan, Matthew and Chenthamarakshan, Vijil and Donovan, Timothy and Hsu, Hsiang Han and Zipoli, Federico and Schilter, Oliver and Kishimoto, Akihiro and Hamada, Lisa and Padhi, Inkit and Wehden, Karl and McHugh, Lauren and Khrabrov, Alexy and Das, Payel and Takeda, Seiji and Smith, John R.}, + date = {2023-05-01}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--6}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01028-1}, + url = {https://www.nature.com/articles/s41524-023-01028-1}, + urldate = {2023-08-22}, + abstract = {With the growing availability of data within various scientific domains, generative models hold enormous potential to accelerate scientific discovery. They harness powerful representations learned from datasets to speed up the formulation of novel hypotheses with the potential to impact material discovery broadly. We present the Generative Toolkit for Scientific Discovery (GT4SD). This extensible open-source library enables scientists, developers, and researchers to train and use state-of-the-art generative models to accelerate scientific discovery focused on organic material design.}, + issue = {1}, + langid = {english}, + keywords = {/unread,chemistry,generative models,library,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Manica et al_2023_Accelerating material design with the generative toolkit for scientific.pdf} +} + +@article{manicaAcceleratingMaterialDesign2023a, + title = {Accelerating Material Design with the Generative Toolkit for Scientific Discovery}, + author = {Manica, Matteo and Born, Jannis and Cadow, Joris and Christofidellis, Dimitrios and Dave, Ashish and Clarke, Dean and Teukam, Yves Gaetan Nana and Giannone, Giorgio and Hoffman, Samuel C. 
and Buchan, Matthew and Chenthamarakshan, Vijil and Donovan, Timothy and Hsu, Hsiang Han and Zipoli, Federico and Schilter, Oliver and Kishimoto, Akihiro and Hamada, Lisa and Padhi, Inkit and Wehden, Karl and McHugh, Lauren and Khrabrov, Alexy and Das, Payel and Takeda, Seiji and Smith, John R.}, + date = {2023-05-01}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--6}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01028-1}, + url = {https://www.nature.com/articles/s41524-023-01028-1}, + urldate = {2023-10-08}, + abstract = {With the growing availability of data within various scientific domains, generative models hold enormous potential to accelerate scientific discovery. They harness powerful representations learned from datasets to speed up the formulation of novel hypotheses with the potential to impact material discovery broadly. We present the Generative Toolkit for Scientific Discovery (GT4SD). 
This extensible open-source library enables scientists, developers, and researchers to train and use state-of-the-art generative models to accelerate scientific discovery focused on organic material design.}, + issue = {1}, + langid = {english}, + keywords = {/unread,AML,generative models,library,ML,pretrained models,with-code,with-demo}, + file = {/Users/wasmer/Nextcloud/Zotero/Manica et al_2023_Accelerating material design with the generative toolkit for scientific2.pdf} +} + @article{manzoorMachineLearningBased2021, title = {Machine {{Learning Based Methodology}} to {{Predict Point Defect Energies}} in {{Multi-Principal Element Alloys}}}, author = {Manzoor, Anus and Arora, Gaurav and Jerome, Bryant and Linton, Nathan and Norman, Bailey and Aidhy, Dilpuneet S.}, @@ -7330,6 +9719,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Manzoor et al_2021_Machine Learning Based Methodology to Predict Point Defect Energies in.pdf} } +@article{marblestoneUnblockResearchBottlenecks2022, + title = {Unblock Research Bottlenecks with Non-Profit Start-Ups}, + author = {Marblestone, Adam and Gamick, Anastasia and Kalil, Tom and Martin, Cheryl and Cvitkovic, Milan and Rodriques, Samuel G.}, + date = {2022-01}, + journaltitle = {Nature}, + volume = {601}, + number = {7892}, + pages = {188--190}, + publisher = {{Nature Publishing Group}}, + doi = {10.1038/d41586-022-00018-5}, + url = {https://www.nature.com/articles/d41586-022-00018-5}, + urldate = {2023-08-21}, + abstract = {‘Focused research organizations’ can take on mid-scale projects that don’t get tackled by academia, venture capitalists or government labs.}, + issue = {7892}, + langid = {english}, + keywords = {/unread,Institutions,Pasteur \& ISI,Research management,todo-tagging}, + annotation = {Bandiera\_abtest: a Cg\_type: Comment Subject\_term: Research management, Institutions}, + file = {/Users/wasmer/Nextcloud/Zotero/Marblestone et al_2022_Unblock research bottlenecks with non-profit 
start-ups.pdf;/Users/wasmer/Zotero/storage/95Z9E64E/d41586-022-00018-5.html}
+}
+
 @article{margrafPureNonlocalMachinelearned2021,
   title = {Pure non-local machine-learned density functional theory for electron correlation},
   author = {Margraf, Johannes T. and Reuter, Karsten},
@@ -7386,6 +9795,40 @@
   file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Martin_2020_Electronic Structure.pdf;/Users/wasmer/Zotero/storage/PUPKBCZR/ED0FF348536BFFE8899627C8F78FEE6A.html}
 }
 
+@article{martinez-carracedoElectricallyDrivenSinglettriplet2023,
+  title = {Electrically driven singlet-triplet transition in triangulene spin-1 chains},
+  author = {Martínez-Carracedo, Gabriel and Oroszlány, László and García-Fuente, Amador and Szunyogh, László and Ferrer, Jaime},
+  date = {2023-01-25},
+  journaltitle = {Physical Review B},
+  shortjournal = {Phys. Rev. B},
+  volume = {107},
+  number = {3},
+  pages = {035432},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevB.107.035432},
+  url = {https://link.aps.org/doi/10.1103/PhysRevB.107.035432},
+  urldate = {2023-09-20},
+  abstract = {Recently, graphene triangulene chains have been synthesized, and their magnetic response has been analyzed by scanning tunneling microscopy methods by Mishra et al. [Nature (London) 598, 287 (2021)]. Motivated by this study, we determine the exchange bilinear and biquadratic constants of the triangulene chains by calculating two-spin rotations in the spirit of the magnetic force theorem. We then analyze open-ended, odd-numbered chains, whose edge states pair up forming a triplet ground state. We propose three experimental approaches that enable us to trigger and control a singlet-triplet spin transition. Two of these methods are based on applying a mechanical distortion to the chain. 
We finally show that the transition can be controlled efficiently by the application of an electric field.},
+  keywords = {Budapest KKR group,DFT,GF2023 workshop,Jij,SIESTA,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Martínez-Carracedo et al_2023_Electrically driven singlet-triplet transition in triangulene spin-1 chains.pdf;/Users/wasmer/Zotero/storage/QKMHR94B/PhysRevB.107.html}
+}
+
+@online{martinez-carracedoRelativisticMagneticInteractions2023,
+  title = {Relativistic Magnetic Interactions from Non-Orthogonal Basis Sets},
+  author = {Martínez-Carracedo, Gabriel and Oroszlány, László and García-Fuente, Amador and Nyári, Bendegúz and Udvardi, László and Szunyogh, László and Ferrer, Jaime},
+  date = {2023-09-05},
+  eprint = {2309.02558},
+  eprinttype = {arxiv},
+  eprintclass = {cond-mat},
+  doi = {10.48550/arXiv.2309.02558},
+  url = {http://arxiv.org/abs/2309.02558},
+  urldate = {2023-09-20},
+  abstract = {We propose a method to determine the magnetic exchange interaction and on-site anisotropy tensors of extended Heisenberg spin models from density functional theory including relativistic effects. The method is based on the Liechtenstein-Katsnelson-Antropov-Gubanov torque formalism, whereby energy variations upon infinitesimal rotations are performed. We assume that the Kohn-Sham Hamiltonian is expanded in a non-orthogonal basis set of pseudo-atomic orbitals. We define local operators that are both hermitian and satisfy relevant sum rules. We demonstrate that in the presence of spin-orbit coupling a correct mapping from the density functional total energy to a spin model that relies on the rotation of the exchange field part of the Hamiltonian can not be accounted for by transforming the full Hamiltonian. We derive a set of sum rules that pose stringent validity tests on any specific calculation. 
We showcase the flexibility and accuracy of the method by computing the exchange and anisotropy tensors of both well-studied magnetic nanostructures and of recently synthesized two-dimensional magnets. Specifically, we benchmark our approach against the established Korringa-Kohn-Rostoker Green's function method and show that they agree well. Finally, we demonstrate how the application of biaxial strain on the two-dimensional magnet T-CrTe2 can trigger a magnetic phase transition.},
+  pubstate = {preprint},
+  keywords = {/unread,Budapest KKR group,DFT,GF2023 workshop,Jij,SIESTA,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Martínez-Carracedo et al_2023_Relativistic magnetic interactions from non-orthogonal basis sets.pdf;/Users/wasmer/Zotero/storage/8KP4SZA4/2309.html}
+}
+
 @book{martinInteractingElectronsTheory2016,
   title = {Interacting {{Electrons}}: {{Theory}} and {{Computational Approaches}}},
   shorttitle = {Interacting {{Electrons}}},
@@ -7398,7 +9841,6 @@
   urldate = {2023-07-04},
   abstract = {Recent progress in the theory and computation of electronic structure is bringing an unprecedented level of capability for research. Many-body methods are becoming essential tools vital for quantitative calculations and understanding materials phenomena in physics, chemistry, materials science and other fields. This book provides a unified exposition of the most-used tools: many-body perturbation theory, dynamical mean field theory and quantum Monte Carlo simulations. Each topic is introduced with a less technical overview for a broad readership, followed by in-depth descriptions and mathematical formulation. Practical guidelines, illustrations and exercises are chosen to enable readers to appreciate the complementary approaches, their relationships, and the advantages and disadvantages of each method. 
This book is designed for graduate students and researchers who want to use and understand these advanced computational tools, get a broad overview, and acquire a basis for participating in new developments.},
   isbn = {978-0-521-87150-1},
-  keywords = {/unread},
   file = {/Users/wasmer/Nextcloud/Zotero/Martin et al_2016_Interacting Electrons.pdf;/Users/wasmer/Zotero/storage/2VUQIE7U/4317C43D0531C900920E83DD4632CFE9.html}
 }
 
@@ -7431,6 +9873,32 @@
   file = {/Users/wasmer/Nextcloud/Zotero/Marzari et al_2021_Electronic-structure methods for materials design.pdf;/Users/wasmer/Zotero/storage/AKF7QEMC/s41563-021-01013-3.html}
 }
 
+@misc{MAterialsDesignEXascale,
+  title = {{{MAterials}} Design at the {{eXascale}} | {{MaX Project}} | {{Fact Sheet}} | {{HORIZON}}},
+  url = {https://cordis.europa.eu/project/id/101093374},
+  urldate = {2023-10-08},
+  abstract = {Understanding, predicting, and discovering the properties and performance of materials is key to delivering the technologies that power our economy and provide a sustainable development to our society. For this reason, materials simulations have become one of the most...},
+  langid = {english},
+  keywords = {/unread,FLEUR,grant,Horizon Europe,JuDFT,PGI-1/IAS-1},
+  file = {/Users/wasmer/Zotero/storage/TVLGP47F/101093374.html}
+}
+
+@online{maTransferableMachineLearning2023,
+  title = {Transferable {{Machine Learning Approach}} for {{Predicting Electronic Structures}} of {{Charged Defects}}},
+  author = {Ma, Yuxing and Zhong, Yang and Yu, Hongyu and Chen, Shiyou and Xiang, Hongjun},
+  date = {2023-06-13},
+  eprint = {2306.08017},
+  eprinttype = {arxiv},
+  eprintclass = {cond-mat, physics:physics},
+  doi = {10.48550/arXiv.2306.08017},
+  url = {http://arxiv.org/abs/2306.08017},
+  urldate = {2023-10-13},
+  abstract = {The study of the electronic properties of charged defects is crucial for our understanding of various electrical properties of materials. 
However, the high computational cost of density functional theory (DFT) hinders the research on large defect models. In this study, we present an E(3) equivariant graph neural network framework (HamGNN-Q), which can predict the tight-binding Hamiltonian matrices for various defect types with different charges using only one set of network parameters. By incorporating features of background charge into the element representation, HamGNN-Q enables a direct mapping from structure and background charge to the electronic Hamiltonian matrix of charged defect systems without DFT calculation. We demonstrate the model's high precision and transferability through testing on GaAs systems with various charged defect configurations. Our approach provides a practical solution for accelerating charged defect electronic structure calculations and advancing the design of materials with tailored electronic properties.}, + pubstate = {preprint}, + keywords = {AML,defects,disordered,ML,ML-DFT,ML-ESM,point defects,prediction of Hamiltonian matrix}, + file = {/Users/wasmer/Nextcloud/Zotero/Ma et al_2023_Transferable Machine Learning Approach for Predicting Electronic Structures of.pdf;/Users/wasmer/Zotero/storage/TICQMBV5/2306.html} +} + @inproceedings{mavropoulosKorringaKohnRostokerKKRGreen2006, title = {The {{Korringa-Kohn-Rostoker}} ({{KKR}}) {{Green}} Function Method {{I}}. {{Electronic}} Structure of Periodic Systems}, booktitle = {Computational {{Nanoscience}}: {{Do It Yourself}}! 
- {{Lecture Notes}}}, @@ -7506,6 +9974,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Mehta et al_2019_A high-bias, low-variance introduction to Machine Learning for physicists.pdf} } +@online{mellorNeuralArchitectureSearch2021, + title = {Neural {{Architecture Search}} without {{Training}}}, + author = {Mellor, Joseph and Turner, Jack and Storkey, Amos and Crowley, Elliot J.}, + date = {2021-06-11}, + eprint = {2006.04647}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.2006.04647}, + url = {http://arxiv.org/abs/2006.04647}, + urldate = {2023-10-05}, + abstract = {The time and effort involved in hand-designing deep neural networks is immense. This has prompted the development of Neural Architecture Search (NAS) techniques to automate this design. However, NAS algorithms tend to be slow and expensive; they need to train vast numbers of candidate networks to inform the search process. This could be alleviated if we could partially predict a network's trained accuracy from its initial state. In this work, we examine the overlap of activations between datapoints in untrained networks and motivate how this can give a measure which is usefully indicative of a network's trained performance. We incorporate this measure into a simple algorithm that allows us to search for powerful networks without any training in a matter of seconds on a single GPU, and verify its effectiveness on NAS-Bench-101, NAS-Bench-201, NATS-Bench, and Network Design Spaces. Our approach can be readily combined with more expensive search methods; we examine a simple adaptation of regularised evolutionary search. 
Code for reproducing our experiments is available at https://github.com/BayesWatch/nas-without-training.}, + pubstate = {preprint}, + version = {3}, + keywords = {autoML,Deep learning,General ML,hyperparameters,hyperparameters optimization,library,MALA,ML,NN,NN architecture,PyTorch,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Mellor et al_2021_Neural Architecture Search without Training.pdf;/Users/wasmer/Zotero/storage/NKM9IWFY/2006.html} +} + @article{meredigCanMachineLearning2018, title = {Can Machine Learning Identify the next High-Temperature Superconductor? {{Examining}} Extrapolation Performance for Materials Discovery}, shorttitle = {Can Machine Learning Identify the next High-Temperature Superconductor?}, @@ -7526,6 +10011,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Meredig et al_2018_Can machine learning identify the next high-temperature superconductor.pdf;/Users/wasmer/Zotero/storage/9WFQM4EG/c8me00012c.html} } +@article{meredigFiveHighImpactResearch2019, + title = {Five {{High-Impact Research Areas}} in {{Machine Learning}} for {{Materials Science}}}, + author = {Meredig, Bryce}, + date = {2019-12-10}, + journaltitle = {Chemistry of Materials}, + shortjournal = {Chem. Mater.}, + volume = {31}, + number = {23}, + pages = {9579--9581}, + publisher = {{American Chemical Society}}, + issn = {0897-4756}, + doi = {10.1021/acs.chemmater.9b04078}, + url = {https://doi.org/10.1021/acs.chemmater.9b04078}, + urldate = {2023-08-19}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Meredig_2019_Five High-Impact Research Areas in Machine Learning for Materials Science.pdf;/Users/wasmer/Zotero/storage/ZZ9VLFUX/acs.chemmater.html} +} + @article{merkerMachineLearningMagnetism2022, title = {Machine Learning Magnetism Classifiers from Atomic Coordinates}, author = {Merker, Helena A. and Heiberger, Harry and Nguyen, Linh and Liu, Tongtong and Chen, Zhantao and Andrejevic, Nina and Drucker, Nathan C. 
and Okabe, Ryotaro and Kim, Song Eun and Wang, Yao and Smidt, Tess and Li, Mingda}, @@ -7564,6 +10067,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Merkys et al_2017_A posteriori metadata from automated provenance tracking.pdf;/Users/wasmer/Zotero/storage/9ZIMVPJ8/s13321-017-0242-y.html} } +@online{metzVeLOTrainingVersatile2022, + title = {{{VeLO}}: {{Training Versatile Learned Optimizers}} by {{Scaling Up}}}, + shorttitle = {{{VeLO}}}, + author = {Metz, Luke and Harrison, James and Freeman, C. Daniel and Merchant, Amil and Beyer, Lucas and Bradbury, James and Agrawal, Naman and Poole, Ben and Mordatch, Igor and Roberts, Adam and Sohl-Dickstein, Jascha}, + date = {2022-11-17}, + eprint = {2211.09760}, + eprinttype = {arxiv}, + eprintclass = {cs, math, stat}, + doi = {10.48550/arXiv.2211.09760}, + url = {http://arxiv.org/abs/2211.09760}, + urldate = {2023-07-21}, + abstract = {While deep learning models have replaced hand-designed features across many domains, these models are still trained with hand-designed optimizers. In this work, we leverage the same scaling approach behind the success of deep learning to learn versatile optimizers. We train an optimizer for deep learning which is itself a small neural network that ingests gradients and outputs parameter updates. Meta-trained with approximately four thousand TPU-months of compute on a wide variety of optimization tasks, our optimizer not only exhibits compelling performance, but optimizes in interesting and unexpected ways. It requires no hyperparameter tuning, instead automatically adapting to the specifics of the problem being optimized. 
We open source our learned optimizer, meta-training code, the associated train and test data, and an extensive optimizer benchmark suite with baselines at velo-code.github.io.}, + pubstate = {preprint}, + keywords = {autoML,general ML,Google,hyperparameters,hyperparameters optimization,meta-training,ML,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Metz et al_2022_VeLO.pdf;/Users/wasmer/Zotero/storage/82NVCST9/2211.html} +} + @article{microsoftquantumInAsAlHybridDevices2023, title = {{{InAs-Al}} Hybrid Devices Passing the Topological Gap Protocol}, author = {{Microsoft Quantum} and Aghaee, Morteza and Akkala, Arun and Alam, Zulfi and Ali, Rizwan and Alcaraz Ramirez, Alejandro and Andrzejczuk, Mariusz and Antipov, Andrey E. and Aseev, Pavel and Astafev, Mikhail and Bauer, Bela and Becker, Jonathan and Boddapati, Srini and Boekhout, Frenk and Bommer, Jouri and Bosma, Tom and Bourdet, Leo and Boutin, Samuel and Caroff, Philippe and Casparis, Lucas and Cassidy, Maja and Chatoor, Sohail and Christensen, Anna Wulf and Clay, Noah and Cole, William S. 
and Corsetti, Fabiano and Cui, Ajuan and Dalampiras, Paschalis and Dokania, Anand and family=Lange, given=Gijs, prefix=de, useprefix=true and family=Moor, given=Michiel, prefix=de, useprefix=true and Estrada Saldaña, Juan Carlos and Fallahi, Saeed and Fathabad, Zahra Heidarnia and Gamble, John and Gardner, Geoff and Govender, Deshan and Griggio, Flavio and Grigoryan, Ruben and Gronin, Sergei and Gukelberger, Jan and Hansen, Esben Bork and Heedt, Sebastian and Herranz Zamorano, Jesús and Ho, Samantha and Holgaard, Ulrik Laurens and Ingerslev, Henrik and Johansson, Linda and Jones, Jeffrey and Kallaher, Ray and Karimi, Farhad and Karzig, Torsten and King, Cameron and Kloster, Maren Elisabeth and Knapp, Christina and Kocon, Dariusz and Koski, Jonne and Kostamo, Pasi and Krogstrup, Peter and Kumar, Mahesh and Laeven, Tom and Larsen, Thorvald and Li, Kongyi and Lindemann, Tyler and Love, Julie and Lutchyn, Roman and Madsen, Morten Hannibal and Manfra, Michael and Markussen, Signe and Martinez, Esteban and McNeil, Robert and Memisevic, Elvedin and Morgan, Trevor and Mullally, Andrew and Nayak, Chetan and Nielsen, Jens and Nielsen, William Hvidtfelt Padkær and Nijholt, Bas and Nurmohamed, Anne and O'Farrell, Eoin and Otani, Keita and Pauka, Sebastian and Petersson, Karl and Petit, Luca and Pikulin, Dmitry I. and Preiss, Frank and Quintero-Perez, Marina and Rajpalke, Mohana and Rasmussen, Katrine and Razmadze, Davydas and Reentila, Outi and Reilly, David and Rouse, Richard and Sadovskyy, Ivan and Sainiemi, Lauri and Schreppler, Sydney and Sidorkin, Vadim and Singh, Amrita and Singh, Shilpi and Sinha, Sarat and Sohr, Patrick and StankeviÄ, TomaÅ¡ and Stek, Lieuwe and Suominen, Henri and Suter, Judith and Svidenko, Vicky and Teicher, Sam and Temuerhan, Mine and Thiyagarajah, Nivetha and Tholapi, Raj and Thomas, Mason and Toomey, Emily and Upadhyay, Shivendra and Urban, Ivan and VaitiekÄ—nas, Saulius and Van Hoogdalem, Kevin and Van Woerkom, David and Viazmitinov, Dmitrii V. 
and Vogel, Dominik and Waddy, Steven and Watson, John and Weston, Joseph and Winkler, Georg W. and Yang, Chung Kai and Yau, Sean and Yi, Daniel and Yucelen, Emrah and Webster, Alex and Zeisel, Roland and Zhao, Ruichen},
@@ -7661,6 +10181,24 @@
   file = {/Users/wasmer/Zotero/storage/8VNJWN2F/global.html}
 }
 
+@article{mooreTopologicalInvariantsTimereversalinvariant2007,
+  title = {Topological Invariants of Time-Reversal-Invariant Band Structures},
+  author = {Moore, J. E. and Balents, L.},
+  date = {2007-03-26},
+  journaltitle = {Physical Review B},
+  shortjournal = {Phys. Rev. B},
+  volume = {75},
+  number = {12},
+  pages = {121306},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevB.75.121306},
+  url = {https://link.aps.org/doi/10.1103/PhysRevB.75.121306},
+  urldate = {2023-07-12},
+  abstract = {The topological invariants of a time-reversal-invariant band structure in two dimensions are multiple copies of the Z2 invariant found by Kane and Mele. Such invariants protect the “topological insulator” phase and give rise to a spin Hall effect carried by edge states. Each pair of bands related by time reversal is described by one Z2 invariant, up to one less than half the dimension of the Bloch Hamiltonians. In three dimensions, there are four such invariants per band pair. The Z2 invariants of a crystal determine the transitions between ordinary and topological insulators as its bands are occupied by electrons. 
We derive these invariants using maps from the Brillouin zone to the space of Bloch Hamiltonians and clarify the connections between Z2 invariants, the integer invariants that underlie the integer quantum Hall effect, and previous invariants of T-invariant Fermi systems.}, + keywords = {/unread,Hall effect,Hall SHE,invariance,original publication,quantum materials,topological,topological insulator,TRS}, + file = {/Users/wasmer/Zotero/storage/QCK3M8Q5/Moore and Balents - 2007 - Topological invariants of time-reversal-invariant .pdf;/Users/wasmer/Zotero/storage/9LV63QTR/PhysRevB.75.html} +} + @article{morawietzDensityFunctionalTheoryBasedNeural2013, title = {A {{Density-Functional Theory-Based Neural Network Potential}} for {{Water Clusters Including}} van Der {{Waals Corrections}}}, author = {Morawietz, Tobias and Behler, Jörg}, @@ -7747,6 +10285,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Morgenstern et al_2021_Strong and Weak 3D Topological Insulators Probed by Surface Science Methods.pdf;/Users/wasmer/Zotero/storage/4RCNJ2RK/pssb.html} } +@article{moriartyUnlockNNUncertaintyQuantification2022, + title = {{{UnlockNN}}: {{Uncertainty}} Quantification for Neural Network Models of Chemical Systems}, + shorttitle = {{{UnlockNN}}}, + author = {Moriarty, Alexander and Morita, Kazuki and Butler, Keith T. and Walsh, Aron}, + date = {2022-07-05}, + journaltitle = {Journal of Open Source Software}, + volume = {7}, + number = {75}, + pages = {3700}, + issn = {2475-9066}, + doi = {10.21105/joss.03700}, + url = {https://joss.theoj.org/papers/10.21105/joss.03700}, + urldate = {2023-08-25}, + abstract = {Moriarty et al., (2022). UnlockNN: Uncertainty quantification for neural network models of chemical systems. 
Journal of Open Source Software, 7(75), 3700, https://doi.org/10.21105/joss.03700}, + langid = {english}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Moriarty et al_2022_UnlockNN.pdf} +} + @online{morrowHowValidateMachinelearned2022, title = {How to Validate Machine-Learned Interatomic Potentials}, author = {Morrow, Joe D. and Gardner, John L. A. and Deringer, Volker L.}, @@ -7763,6 +10320,61 @@ file = {/Users/wasmer/Nextcloud/Zotero/Morrow et al_2022_How to validate machine-learned interatomic potentials.pdf;/Users/wasmer/Zotero/storage/TW3TCHB3/2211.html} } +@article{mosquera-loisIdentifyingGroundState2023, + title = {Identifying the Ground State Structures of Point Defects in Solids}, + author = {Mosquera-Lois, Irea and Kavanagh, Seán R. and Walsh, Aron and Scanlon, David O.}, + date = {2023-02-17}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--11}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-00973-1}, + url = {https://www.nature.com/articles/s41524-023-00973-1}, + urldate = {2023-08-25}, + abstract = {Point defects are a universal feature of crystals. Their identification is addressed by combining experimental measurements with theoretical models. The standard modelling approach is, however, prone to missing the ground state atomic configurations associated with energy-lowering reconstructions from the idealised crystallographic environment. Missed ground states compromise the accuracy of calculated properties. To address this issue, we report an approach to navigate the defect configurational landscape using targeted bond distortions and rattling. Application of our workflow to eight materials (CdTe, GaAs, Sb2S3, Sb2Se3, CeO2, In2O3, ZnO, anatase-TiO2) reveals symmetry breaking in each host crystal that is not found via conventional local minimisation techniques. 
The point defect distortions are classified by the associated physico-chemical factors. We demonstrate the impact of these defect distortions on derived properties, including formation energies, concentrations and charge transition levels. Our work presents a step forward for quantitative modelling of imperfect solids.}, + issue = {1}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Mosquera-Lois et al_2023_Identifying the ground state structures of point defects in solids.pdf} +} + +@article{mosquera-loisImperfectionsAreNot2023, + title = {Imperfections Are Not 0 {{K}}: Free Energy of Point Defects in Crystals}, + shorttitle = {Imperfections Are Not 0 {{K}}}, + author = {Mosquera-Lois, Irea and R.~Kavanagh, Seán and Klarbring, Johan and Tolborg, Kasper and Walsh, Aron}, + date = {2023}, + journaltitle = {Chemical Society Reviews}, + publisher = {{Royal Society of Chemistry}}, + doi = {10.1039/D3CS00432E}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/cs/d3cs00432e}, + urldate = {2023-08-25}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Mosquera-Lois et al_2023_Imperfections are not 0 K.pdf} +} + +@article{mosquera-loisShakeNBreakNavigatingDefect2022, + title = {{{ShakeNBreak}}: {{Navigating}} the Defect Configurational Landscape}, + shorttitle = {{{ShakeNBreak}}}, + author = {Mosquera-Lois, Irea and Kavanagh, Seán R. and Walsh, Aron and Scanlon, David O.}, + date = {2022-12-01}, + journaltitle = {Journal of Open Source Software}, + volume = {7}, + number = {80}, + pages = {4817}, + issn = {2475-9066}, + doi = {10.21105/joss.04817}, + url = {https://joss.theoj.org/papers/10.21105/joss.04817}, + urldate = {2023-08-25}, + abstract = {Mosquera-Lois et al., (2022). ShakeNBreak: Navigating the defect configurational landscape. 
Journal of Open Source Software, 7(80), 4817, https://doi.org/10.21105/joss.04817}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Mosquera-Lois et al_2022_ShakeNBreak.pdf} +} + @thesis{mozumderDesignMagneticInteractions2022, type = {mathesis}, title = {Design of Magnetic Interactions in Doped Topological Insulators}, @@ -7776,18 +10388,55 @@ file = {/Users/wasmer/Nextcloud/Zotero/Mozumder_2022_Design of magnetic interactions in doped topological insulators.pdf} } +@online{muckleyInterpretableModelsExtrapolation2022, + title = {Interpretable Models for Extrapolation in Scientific Machine Learning}, + author = {Muckley, Eric S. and Saal, James E. and Meredig, Bryce and Roper, Christopher S. and Martin, John H.}, + date = {2022-12-16}, + eprint = {2212.10283}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2212.10283}, + url = {http://arxiv.org/abs/2212.10283}, + urldate = {2023-08-19}, + abstract = {Data-driven models are central to scientific discovery. In efforts to achieve state-of-the-art model accuracy, researchers are employing increasingly complex machine learning algorithms that often outperform simple regressions in interpolative settings (e.g. random k-fold cross-validation) but suffer from poor extrapolation performance, portability, and human interpretability, which limits their potential for facilitating novel scientific insight. Here we examine the trade-off between model performance and interpretability across a broad range of science and engineering problems with an emphasis on materials science datasets. We compare the performance of black box random forest and neural network machine learning algorithms to that of single-feature linear regressions which are fitted using interpretable input features discovered by a simple random search algorithm. 
For interpolation problems, the average prediction errors of linear regressions were twice as high as those of black box models. Remarkably, when prediction tasks required extrapolation, linear models yielded average error only 5\% higher than that of black box models, and outperformed black box models in roughly 40\% of the tested prediction tasks, which suggests that they may be desirable over complex algorithms in many extrapolation problems because of their superior interpretability, computational overhead, and ease of use. The results challenge the common assumption that extrapolative models for scientific machine learning are constrained by an inherent trade-off between performance and interpretability.}, + pubstate = {preprint}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Muckley et al_2022_Interpretable models for extrapolation in scientific machine learning.pdf;/Users/wasmer/Zotero/storage/HE59WRRZ/2212.html} +} + +@online{mullerAttendingGraphTransformers2023, + title = {Attending to {{Graph Transformers}}}, + author = {Müller, Luis and Galkin, Mikhail and Morris, Christopher and Rampášek, Ladislav}, + date = {2023-02-08}, + eprint = {2302.04181}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2302.04181}, + url = {http://arxiv.org/abs/2302.04181}, + urldate = {2023-07-24}, + abstract = {Recently, transformer architectures for graphs emerged as an alternative to established techniques for machine learning with graphs, such as graph neural networks. So far, they have shown promising empirical results, e.g., on molecular prediction datasets, often attributed to their ability to circumvent graph neural networks' shortcomings, such as over-smoothing and over-squashing. Here, we derive a taxonomy of graph transformer architectures, bringing some order to this emerging field. 
We overview their theoretical properties, survey structural and positional encodings, and discuss extensions for important graph classes, e.g., 3D molecular graphs. Empirically, we probe how well graph transformers can recover various graph properties, how well they can deal with heterophilic graphs, and to what extent they prevent over-squashing. Further, we outline open challenges and research direction to stimulate future work. Our code is available at https://github.com/luis-mueller/probing-graph-transformers.}, + pubstate = {preprint}, + keywords = {/unread,attention,benchmarking,general ML,GNN,graph attention,graph ML,graph transformer,ML,OGB,transformer}, + file = {/Users/wasmer/Nextcloud/Zotero/Müller et al_2023_Attending to Graph Transformers.pdf;/Users/wasmer/Zotero/storage/I3F8C5FS/2302.html} +} + @article{mullerSpiritMultifunctionalFramework2019, - title = {\emph{Spirit}: {{Multifunctional}} Framework for Atomistic Spin Simulations}, - shorttitle = {\emph{Spirit}}, - author = {Müller, Gideon P.}, - date = {2019}, + title = {Spirit: {{Multifunctional}} Framework for Atomistic Spin Simulations}, + shorttitle = {Spirit}, + author = {Müller, Gideon P. and Hoffmann, Markus and Dißelkamp, Constantin and Schürhoff, Daniel and Mavros, Stefanos and Sallermann, Moritz and Kiselev, Nikolai S. and Jónsson, Hannes and Blügel, Stefan}, + date = {2019-06-10}, journaltitle = {Physical Review B}, shortjournal = {Phys. Rev. 
B}, volume = {99}, number = {22}, + pages = {224414}, + publisher = {{American Physical Society}}, doi = {10.1103/PhysRevB.99.224414}, - keywords = {browser-based visualization,interactive visualization,library,PGI-1/IAS-1,spin dynamics,Spirit,visualization,web app,with-code}, - file = {/Users/wasmer/Nextcloud/Zotero/Müller_2019_iSpirit-i.pdf;/Users/wasmer/Zotero/storage/NXE55BTB/PhysRevB.99.html} + url = {https://link.aps.org/doi/10.1103/PhysRevB.99.224414}, + urldate = {2023-10-26}, + abstract = {The Spirit framework is designed for atomic-scale spin simulations of magnetic systems with arbitrary geometry and magnetic structure, providing a graphical user interface with powerful visualizations and an easy-to-use scripting interface. An extended Heisenberg-type spin-lattice Hamiltonian including competing exchange interactions between neighbors at arbitrary distances, higher-order exchange, Dzyaloshinskii-Moriya and dipole-dipole interactions is used to describe the energetics of a system of classical spins localized at atom positions. A variety of common simulation methods are implemented including Monte Carlo and various time evolution algorithms based on the Landau-Lifshitz-Gilbert (LLG) equation of motion. These methods can be used to determine static ground-state and metastable spin configurations, sample equilibrium and finite-temperature thermodynamical properties of magnetic materials and nanostructures, or calculate dynamical trajectories including spin torques induced by stochastic temperature or electric current. Methods for finding the mechanism and rate of thermally assisted transitions include the geodesic nudged elastic band method, which can be applied when both initial and final states are specified, and the minimum mode-following method when only the initial state is given. The lifetimes of magnetic states and rates of transitions can be evaluated within the harmonic approximation of transition-state theory. 
The framework offers performant central processing unit (CPU) and graphics processing unit (GPU) parallelizations. All methods are verified and applications to several systems, such as vortices, domain walls, skyrmions, and bobbers are described.}, + keywords = {/unread,FZJ,Heisenberg model,Jij,Landau-Lifshits-Gilbert equation,library,magnetic moment,magnetism,PGI,PGI-1/IAS-1,physics,spin dynamics,spin texture,spin-dependent,Spirit,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Müller et al_2019_Spirit.pdf;/Users/wasmer/Zotero/storage/N8F8Z9GD/PhysRevB.99.html} } @book{MultipleScatteringTheory, @@ -7797,6 +10446,7 @@ urldate = {2021-12-02}, isbn = {978-0-7503-1490-9}, langid = {english}, + keywords = {\_tablet}, file = {/Users/wasmer/Nextcloud/Zotero/Multiple Scattering Theory.pdf;/Users/wasmer/Zotero/storage/UYLUXULV/978-0-7503-1490-9.html} } @@ -7835,6 +10485,36 @@ file = {/Users/wasmer/Zotero/storage/INEV8259/Musaelian et al. - 2023 - Learning local equivariant representations for lar.pdf} } +@video{musaelianLearningLocalEquivariant2023a, + entrysubtype = {video}, + title = {Learning {{Local Equivariant Representations}} for {{Large-Scale Atomistic Dynamics}}}, + editor = {Musaelian, Albert}, + editortype = {director}, + date = {2023-06-13}, + location = {{online}}, + url = {https://m2d2.io/talks/m2d2/learning-local-equivariant-representations-for-large-scale-atomistic-dynamics/}, + urldate = {2023-09-05}, + abstract = {Trade-offs between accuracy and speed have long limited the applications of machine learning interatomic potentials. Recently, E(3)-equivariant architectures have demonstrated leading accuracy, data efficiency, transferability, and simulation stability, but their computational cost and scaling has generally reinforced this trade-off. In particular, the ubiquitous use of message passing architectures has precluded the extension of accessible length- and time-scales with efficient multi-GPU calculations. 
In this talk I will discuss Allegro, a strictly local equivariant deep learning interatomic potential designed for parallel scalability and increased computational efficiency that simultaneously exhibits excellent accuracy. After presenting the architecture, I will discuss applications and benchmarks on various materials and chemical systems, including recent demonstrations of scaling to large all-atom biomolecular systems such as solvated proteins and a 44 million atom model of the HIV capsid. Finally, I will summarize the software ecosystem and tooling around Allegro.}, + keywords = {/unread}, + file = {/Users/wasmer/Zotero/storage/EXJBCIP2/learning-local-equivariant-representations-for-large-scale-atomistic-dynamics.html} +} + +@online{musaelianScalingLeadingAccuracy2023, + title = {Scaling the Leading Accuracy of Deep Equivariant Models to Biomolecular Simulations of Realistic Size}, + author = {Musaelian, Albert and Johansson, Anders and Batzner, Simon and Kozinsky, Boris}, + date = {2023-04-19}, + eprint = {2304.10061}, + eprinttype = {arxiv}, + eprintclass = {physics, q-bio}, + doi = {10.48550/arXiv.2304.10061}, + url = {http://arxiv.org/abs/2304.10061}, + urldate = {2023-09-04}, + abstract = {This work brings the leading accuracy, sample efficiency, and robustness of deep equivariant neural networks to the extreme computational scale. This is achieved through a combination of innovative model architecture, massive parallelization, and models and implementations optimized for efficient GPU utilization. The resulting Allegro architecture bridges the accuracy-speed tradeoff of atomistic simulations and enables description of dynamics in structures of unprecedented complexity at quantum fidelity. To illustrate the scalability of Allegro, we perform nanoseconds-long stable simulations of protein dynamics and scale up to a 44-million atom structure of a complete, all-atom, explicitly solvated HIV capsid on the Perlmutter supercomputer. 
We demonstrate excellent strong scaling up to 100 million atoms and 70\% weak scaling to 5120 A100 GPUs.}, + pubstate = {preprint}, + keywords = {Allegro,AML,biomolecules,Gordon Bell Prize,HPC,large-scale simulation,ML,MLP,scaling}, + file = {/Users/wasmer/Nextcloud/Zotero/Musaelian et al_2023_Scaling the leading accuracy of deep equivariant models to biomolecular.pdf;/Users/wasmer/Zotero/storage/QQDA94V4/2304.html} +} + @article{musilEfficientImplementationAtomdensity2021, title = {Efficient Implementation of Atom-Density Representations}, author = {Musil, Félix and Veit, Max and Goscinski, Alexander and Fraux, Guillaume and Willatt, Michael J. and Stricker, Markus and Junge, Till and Ceriotti, Michele}, @@ -7922,6 +10602,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Musil et al_2021_Physics-inspired structural representations for molecules and materials.pdf;/Users/wasmer/Zotero/storage/EXTUHGNH/2101.html} } +@software{MuST2021, + title = {{{MuST}}}, + date = {2021-01-01}, + origdate = {2019-10-29T17:07:16Z}, + url = {https://github.com/mstsuite/MuST}, + urldate = {2023-09-19}, + abstract = {Multiple Scattering Theory code for first principles calculations}, + organization = {{ORNL}}, + keywords = {/unread,DFT,KKR,library,Multiple scattering theory} +} + +@article{muyAiiDAdefectsAutomatedFully2023, + title = {{{AiiDA-defects}}: An Automated and Fully Reproducible Workflow for the Complete Characterization of Defect Chemistry in Functional Materials}, + shorttitle = {{{AiiDA-defects}}}, + author = {Muy, Sokseiha and Johnston, Conrad and Marzari, Nicola}, + date = {2023-06}, + journaltitle = {Electronic Structure}, + shortjournal = {Electron. 
Struct.}, + volume = {5}, + number = {2}, + pages = {024009}, + publisher = {{IOP Publishing}}, + issn = {2516-1075}, + doi = {10.1088/2516-1075/ace014}, + url = {https://dx.doi.org/10.1088/2516-1075/ace014}, + urldate = {2023-10-06}, + abstract = {Functional materials that enable many technological applications in our everyday lives owe their unique properties to defects that are carefully engineered and incorporated into these materials during processing. However, optimizing and characterizing these defects is very challenging in practice, making computational modelling an indispensable complementary tool. We have developed an automated workflow and code to accelerate these calculations (AiiDA-defects), which utilises the AiiDA framework, a robust open-source high-throughput materials informatics infrastructure that provides workflow automation while simultaneously preserving and storing the full data provenance in a relational database that is queryable and traversable. This paper describes the design and implementation details of AiiDA-defects, the models and algorithms used, and demonstrates its use in an application to fully characterize the defect chemistry of the well known solid-state Li-ion conductors LiZnPS4. 
We anticipate that AiiDA-defects will be useful as a tool for fully automated and reproducible defect calculations, allowing detailed defect chemistry to be obtained in a reliable and high-throughput way, and paving the way toward the generation of defects databases for accelerated materials design and discovery.}, + langid = {english}, + keywords = {\_tablet,AiiDA,defect chemistry,defects,disordered,FAIR,library,materials informatics,PBC,periodic,point defects,Quantum ESPRESSO,supercell,with-code,workflows}, + file = {/Users/wasmer/Nextcloud/Zotero/Muy et al_2023_AiiDA-defects.pdf} +} + @article{nadj-pergeObservationMajoranaFermions2014, title = {Observation of {{Majorana}} Fermions in Ferromagnetic Atomic Chains on a Superconductor}, author = {Nadj-Perge, Stevan and Drozdov, Ilya K. and Li, Jian and Chen, Hua and Jeon, Sangjun and Seo, Jungpil and MacDonald, Allan H. and Bernevig, B. Andrei and Yazdani, Ali}, @@ -7960,6 +10672,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Nagaosa_Tokura_2013_Topological properties and dynamics of magnetic skyrmions.pdf} } +@article{nakataLargeScaleLinear2020, + title = {Large Scale and Linear Scaling {{DFT}} with the {{CONQUEST}} Code}, + author = {Nakata, Ayako and Baker, Jack S. and Mujahed, Shereif Y. and Poulton, Jack T. L. 
and Arapan, Sergiu and Lin, Jianbo and Raza, Zamaan and Yadav, Sushma and Truflandier, Lionel and Miyazaki, Tsuyoshi and Bowler, David R.}, + date = {2020-04-28}, + journaltitle = {The Journal of Chemical Physics}, + shortjournal = {The Journal of Chemical Physics}, + volume = {152}, + number = {16}, + pages = {164112}, + issn = {0021-9606}, + doi = {10.1063/5.0005074}, + url = {https://doi.org/10.1063/5.0005074}, + urldate = {2023-10-05}, + abstract = {We survey the underlying theory behind the large-scale and linear scaling density functional theory code, conquest, which shows excellent parallel scaling and can be applied to thousands of atoms with diagonalization and millions of atoms with linear scaling. We give details of the representation of the density matrix and the approach to finding the electronic ground state and discuss the implementation of molecular dynamics with linear scaling. We give an overview of the performance of the code, focusing in particular on the parallel scaling, and provide examples of recent developments and applications.}, + keywords = {/unread,CONQUEST,DFT,DFT benchmark,DFT code,HPC,large-scale simulation,linear scaling,linear-scaling DFT,pseudopotential}, + file = {/Users/wasmer/Nextcloud/Zotero/Nakata et al_2020_Large scale and linear scaling DFT with the CONQUEST code.pdf;/Users/wasmer/Zotero/storage/M45BS7QI/Large-scale-and-linear-scaling-DFT-with-the.html} +} + @article{nandiCheapTurnsSuperior2022, title = {Cheap {{Turns Superior}}: {{A Linear Regression-Based Correction Method}} to {{Reaction Energy}} from the {{DFT}}}, shorttitle = {Cheap {{Turns Superior}}}, @@ -8014,6 +10744,35 @@ file = {/Users/wasmer/Nextcloud/Zotero/Narayan et al_2020_Density-Preserving Data Visualization Unveils Dynamic Patterns of Single-Cell.pdf;/Users/wasmer/Zotero/storage/6QBY65KW/2020.05.12.html} } +@online{NatureCollection20162016, + title = {Nature {{Collection}} 2016 {{Nobel Prize}} in {{Physics}}}, + date = {2016-10-04}, + url = 
{https://www.nature.com/collections/fwsytynlwg},
+  urldate = {2023-07-12},
+  abstract = {2016 Nobel Prize in Physics},
+  langid = {english},
+  organization = {{Nature}},
+  keywords = {/unread,collection,history of science,Nobel prize,physics,popular science,quantum materials,topological insulator,Topological matter},
+  file = {/Users/wasmer/Zotero/storage/LSJ66I93/fwsytynlwg.html}
+}
+
+@online{neklyudovWassersteinQuantumMonte2023,
+  title = {Wasserstein {{Quantum Monte Carlo}}: {{A Novel Approach}} for {{Solving}} the {{Quantum Many-Body Schr{\"o}dinger Equation}}},
+  shorttitle = {Wasserstein {{Quantum Monte Carlo}}},
+  author = {Neklyudov, Kirill and Nys, Jannes and Thiede, Luca and Carrasquilla, Juan and Liu, Qiang and Welling, Max and Makhzani, Alireza},
+  date = {2023-07-16},
+  eprint = {2307.07050},
+  eprinttype = {arxiv},
+  eprintclass = {physics},
+  doi = {10.48550/arXiv.2307.07050},
+  url = {http://arxiv.org/abs/2307.07050},
+  urldate = {2023-08-22},
+  abstract = {Solving the quantum many-body Schr{\"o}dinger equation is a fundamental and challenging problem in the fields of quantum physics, quantum chemistry, and material sciences. One of the common computational approaches to this problem is Quantum Variational Monte Carlo (QVMC), in which ground-state solutions are obtained by minimizing the energy of the system within a restricted family of parameterized wave functions. Deep learning methods partially address the limitations of traditional QVMC by representing a rich family of wave functions in terms of neural networks. However, the optimization objective in QVMC remains notoriously hard to minimize and requires second-order optimization methods such as natural gradient. In this paper, we first reformulate energy functional minimization in the space of Born distributions corresponding to particle-permutation (anti-)symmetric wave functions, rather than the space of wave functions. 
We then interpret QVMC as the Fisher-Rao gradient flow in this distributional space, followed by a projection step onto the variational manifold. This perspective provides us with a principled framework to derive new QMC algorithms, by endowing the distributional space with better metrics, and following the projected gradient flow induced by those metrics. More specifically, we propose "Wasserstein Quantum Monte Carlo" (WQMC), which uses the gradient flow induced by the Wasserstein metric, rather than Fisher-Rao metric, and corresponds to transporting the probability mass, rather than teleporting it. We demonstrate empirically that the dynamics of WQMC results in faster convergence to the ground state of molecular systems.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Neklyudov et al_2023_Wasserstein Quantum Monte Carlo.pdf;/Users/wasmer/Zotero/storage/5BUA924B/2307.html} +} + @article{nelsonDataDrivenTimePropagation2022, title = {Data-{{Driven Time Propagation}} of {{Quantum Systems}} with {{Neural Networks}}}, author = {Nelson, James and Coopmans, Luuk and Kells, Graham and Sanvito, Stefano}, @@ -8144,6 +10903,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_2023_Fast proper orthogonal descriptors for many-body interatomic potentials.pdf;/Users/wasmer/Zotero/storage/3337UHFR/PhysRevB.107.html} } +@article{nguyenPredictingTensorialMolecular2022, + title = {Predicting Tensorial Molecular Properties with Equivariant Machine Learning Models}, + author = {Nguyen, Vu Ha Anh and Lunghi, Alessandro}, + date = {2022-04-18}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. 
B}, + volume = {105}, + number = {16}, + pages = {165131}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevB.105.165131}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.105.165131}, + urldate = {2023-10-13}, + abstract = {Embedding molecular symmetries into machine learning models is key for efficient learning of chemico-physical scalar properties, but little evidence on how to extend the same strategy to tensorial quantities exists. Here we formulate a scalable equivariant machine learning model based on local atomic environment descriptors. We apply it to a series of molecules and show that accurate predictions can be achieved for a comprehensive list of dielectric and magnetic tensorial properties of different ranks. These results show that equivariant models are a promising platform to extend the scope of machine learning in materials modeling.}, + file = {/Users/wasmer/Nextcloud/Zotero/Nguyen_Lunghi_2022_Predicting tensorial molecular properties with equivariant machine learning.pdf} +} + @article{nguyenProperOrthogonalDescriptors2023, title = {Proper Orthogonal Descriptors for Efficient and Accurate Interatomic Potentials}, author = {Nguyen, Ngoc Cuong and Rohskopf, Andrew}, @@ -8272,6 +11048,40 @@ file = {/Users/wasmer/Nextcloud/Zotero/Novikov et al_2022_Magnetic Moment Tensor Potentials for collinear spin-polarized materials.pdf} } +@online{nyariTopologicalSuperconductivityFirstprinciples2023, + title = {Topological Superconductivity from First-Principles {{I}}: {{Shiba}} Band Structure and Topological Edge States of Artificial Spin Chains}, + shorttitle = {Topological Superconductivity from First-Principles {{I}}}, + author = {Nyári, Bendegúz and Lászlóffy, András and Csire, Gábor and Szunyogh, László and Újfalussy, Balázs}, + date = {2023-08-26}, + eprint = {2308.13824}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2308.13824}, + url = {http://arxiv.org/abs/2308.13824}, + urldate = {2023-09-20}, + 
abstract = {Magnetic chains on superconductors hosting Majorana Zero Modes (MZMs) attracted high interest due to their possible applications in fault-tolerant quantum computing. However, this is hindered by the lack of a detailed, quantitative understanding of these systems. As a significant step forward, we present a first-principles computational approach based on a microscopic relativistic theory of inhomogeneous superconductors applied to an iron chain on the top of Au-covered Nb(110) to study the Shiba band structure and the topological nature of the edge states. Contrary to contemporary considerations, our method enables the introduction of quantities indicating band inversion without fitting parameters in realistic experimental settings, holding thus the power to determine the topological nature of zero energy edge states in an accurate ab-initio based description of the experimental systems. We confirm that ferromagnetic Fe chains on Au/Nb(110) surface do not support any separated MZM; however, a broad range of spin-spirals can be identified with robust zero energy edge states displaying signatures of MZMs. For these spirals, we explore the structure of the superconducting order parameter shedding light on the internally antisymmetric triplet pairing hosted by MZMs. We also reveal a two-fold effect of spin-orbit coupling: although it tends to enlarge the topological phase regarding spin spiraling angles, however, it also extends the localization of MZMs. 
Due to the presented predictive power, our work fills a big gap between the experimental efforts and theoretical models while paving the way for engineering platforms for topological quantum computation.}, + pubstate = {preprint}, + keywords = {/unread,Budapest KKR group,GF2023 workshop,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Nyári et al_2023_Topological superconductivity from first-principles I.pdf;/Users/wasmer/Zotero/storage/A5BSXVEG/2308.html} +} + +@online{ockCatalystPropertyPrediction2023, + title = {Catalyst {{Property Prediction}} with {{CatBERTa}}: {{Unveiling Feature Exploration Strategies}} through {{Large Language Models}}}, + shorttitle = {Catalyst {{Property Prediction}} with {{CatBERTa}}}, + author = {Ock, Janghoon and Guntuboina, Chakradhar and Farimani, Amir Barati}, + date = {2023-09-01}, + eprint = {2309.00563}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2309.00563}, + url = {http://arxiv.org/abs/2309.00563}, + urldate = {2023-11-05}, + abstract = {Efficient catalyst screening necessitates predictive models for adsorption energy, a key property of reactivity. However, prevailing methods, notably graph neural networks (GNNs), demand precise atomic coordinates for constructing graph representations, while integrating observable attributes remains challenging. This research introduces CatBERTa, an energy prediction Transformer model using textual inputs. Built on a pretrained Transformer encoder, CatBERTa processes human-interpretable text, incorporating target features. Attention score analysis reveals CatBERTa's focus on tokens related to adsorbates, bulk composition, and their interacting atoms. Moreover, interacting atoms emerge as effective descriptors for adsorption configurations, while factors such as bond length and atomic properties of these atoms offer limited predictive contributions. 
By predicting adsorption energy from the textual representation of initial structures, CatBERTa achieves a mean absolute error (MAE) of 0.75 eV-comparable to vanilla Graph Neural Networks (GNNs). Furthermore, the subtraction of the CatBERTa-predicted energies effectively cancels out their systematic errors by as much as 19.3\% for chemically similar systems, surpassing the error reduction observed in GNNs. This outcome highlights its potential to enhance the accuracy of energy difference predictions. This research establishes a fundamental framework for text-based catalyst property prediction, without relying on graph representations, while also unveiling intricate feature-property relationships.}, + pubstate = {preprint}, + keywords = {adsorption,alternative approaches,alternative to GNN,AML,benchmarking,catalysis,geometric deep learning,GNN,LLM,ML,model comparison,prediction of energy,textual representation,transformer}, + file = {/Users/wasmer/Nextcloud/Zotero/Ock et al_2023_Catalyst Property Prediction with CatBERTa.pdf;/Users/wasmer/Zotero/storage/E4E3G8VW/2309.html} +} + @article{ohCompleteQuantumHall2013, title = {The {{Complete Quantum Hall Trio}}}, author = {Oh, Seongshik}, @@ -8306,6 +11116,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Oliveira et al_2020_The CECAM electronic structure library and the modular software development.pdf} } +@article{oliynykHighThroughputMachineLearningDrivenSynthesis2016, + title = {High-{{Throughput Machine-Learning-Driven Synthesis}} of {{Full-Heusler Compounds}}}, + author = {Oliynyk, Anton O. and Antono, Erin and Sparks, Taylor D. and Ghadbeigi, Leila and Gaultois, Michael W. and Meredig, Bryce and Mar, Arthur}, + date = {2016-10-25}, + journaltitle = {Chemistry of Materials}, + shortjournal = {Chem. 
Mater.}, + volume = {28}, + number = {20}, + pages = {7324--7331}, + publisher = {{American Chemical Society}}, + issn = {0897-4756}, + doi = {10.1021/acs.chemmater.6b02724}, + url = {https://doi.org/10.1021/acs.chemmater.6b02724}, + urldate = {2023-08-19}, + abstract = {A machine-learning model has been trained to discover Heusler compounds, which are intermetallics exhibiting diverse physical properties attractive for applications in thermoelectric and spintronic materials. Improving these properties requires knowledge of crystal structures, which occur in three subtle variations (Heusler, inverse Heusler, and CsCl-type structures) that are difficult, and at times impossible, to distinguish by diffraction techniques. Compared to alternative approaches, this Heusler discovery engine performs exceptionally well, making fast and reliable predictions of the occurrence of Heusler vs non-Heusler compounds for an arbitrary combination of elements with no structural input on over 400\,000 candidates. The model has a true positive rate of 0.94 (and false positive rate of 0.01). It is also valuable for data sanitizing, by flagging questionable entries in crystallographic databases. It was applied to screen candidates with the formula AB2C and predict the existence of 12 novel gallides MRu2Ga and RuM2Ga (M = Ti–Co) as Heusler compounds, which were confirmed experimentally. 
One member, TiRu2Ga, exhibited diagnostic superstructure peaks that confirm the adoption of an ordered Heusler as opposed to a disordered CsCl-type structure.}, + keywords = {/unread,Citrine Informatics,todo-tagging} +} + @article{onatSensitivityDimensionalityAtomic2020, title = {Sensitivity and Dimensionality of Atomic Environment Representations Used for Machine Learning Interatomic Potentials}, author = {Onat, Berk and Ortner, Christoph and Kermode, James R.}, @@ -8335,6 +11163,55 @@ file = {/Users/wasmer/Zotero/storage/6TZCQAXX/Machine_Learning_For_Physicists_2021.html} } +@online{ortnerAtomicClusterExpansion2023, + title = {On the {{Atomic Cluster Expansion}}: Interatomic Potentials and Beyond}, + shorttitle = {On the {{Atomic Cluster Expansion}}}, + author = {Ortner, Christoph}, + date = {2023-08-12}, + eprint = {2308.06462}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.25950/c7f24234}, + url = {http://arxiv.org/abs/2308.06462}, + urldate = {2023-09-22}, + abstract = {The Atomic Cluster Expansion (ACE) [R. Drautz, Phys. Rev. B, 99:014104 (2019)] provides a systematically improvable, universal descriptor for the environment of an atom that is invariant to permutation, translation and rotation. ACE is being used extensively in newly emerging interatomic potentials based on machine learning. 
This commentary discusses the ACE framework and its potential impact.}, + pubstate = {preprint}, + keywords = {\_tablet,Condensed Matter - Materials Science,Physics - Chemical Physics,Physics - Computational Physics}, + file = {/Users/wasmer/Zotero/storage/HD6NJWY3/Ortner_2023_On the Atomic Cluster Expansion.pdf;/Users/wasmer/Zotero/storage/LCWVPNL3/2308.html} +} + +@online{ortnerFrameworkGeneralisationAnalysis2022, + title = {A Framework for a Generalisation Analysis of Machine-Learned Interatomic Potentials}, + author = {Ortner, Christoph and Wang, Yangshuai}, + date = {2022-09-12}, + eprint = {2209.05366}, + eprinttype = {arxiv}, + eprintclass = {cs, math}, + doi = {10.48550/arXiv.2209.05366}, + url = {http://arxiv.org/abs/2209.05366}, + urldate = {2023-09-22}, + abstract = {Machine-learned interatomic potentials (MLIPs) and force fields (i.e. interaction laws for atoms and molecules) are typically trained on limited data-sets that cover only a very small section of the full space of possible input structures. MLIPs are nevertheless capable of making accurate predictions of forces and energies in simulations involving (seemingly) much more complex structures. In this article we propose a framework within which this kind of generalisation can be rigorously understood. As a prototypical example, we apply the framework to the case of simulating point defects in a crystalline solid. Here, we demonstrate how the accuracy of the simulation depends explicitly on the size of the training structures, on the kind of observations (e.g., energies, forces, force constants, virials) to which the model has been fitted, and on the fit accuracy. 
The new theoretical insights we gain partially justify current best practices in the MLIP literature and in addition suggest a new approach to the collection of training data and the design of loss functions.}, + pubstate = {preprint}, + keywords = {AML,AML theory,database generation,error estimate,generalization,ML,MLP,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/DFJWG5I4/Ortner and Wang - 2022 - A framework for a generalisation analysis of machi.pdf;/Users/wasmer/Zotero/storage/V6S43FVE/2209.html} +} + +@article{ortnerFrameworkGeneralizationAnalysis2023, + title = {A {{Framework}} for a {{Generalization Analysis}} of {{Machine-Learned Interatomic Potentials}}}, + author = {Ortner, Christoph and Wang, Yangshuai}, + date = {2023-09-30}, + journaltitle = {Multiscale Modeling \& Simulation}, + shortjournal = {Multiscale Model. Simul.}, + pages = {1053--1080}, + publisher = {{Society for Industrial and Applied Mathematics}}, + issn = {1540-3459}, + doi = {10.1137/22M152267X}, + url = {https://epubs.siam.org/doi/abs/10.1137/22M152267X}, + urldate = {2023-09-22}, + abstract = {.In the present paper, we prove convergence rates for the velocity of the local discontinuous Galerkin approximation, proposed in Part I of the paper [A. Kaltenbach and M. RůžiÄka, SIAM J. Numer. Anal., to appear], of systems of -Navier–Stokes type and -Stokes type with . The convergence rates are optimal for linear ansatz functions. The results are supported by numerical experiments.}, + keywords = {/unread} +} + @article{otrokovPredictionObservationAntiferromagnetic2019, title = {Prediction and Observation of an Antiferromagnetic Topological Insulator}, author = {Otrokov, M. M. and Klimovskikh, I. I. and Bentmann, H. and Estyunin, D. and Zeugner, A. and Aliev, Z. S. and Gaß, S. and Wolter, A. U. B. and Koroleva, A. V. and Shikin, A. M. and Blanco-Rey, M. and Hoffmann, M. and Rusinov, I. P. and Vyazovskaya, A. Yu and Eremeev, S. V. and Koroteev, Yu M. and Kuznetsov, V. M. and Freyse, F. 
and Sánchez-Barriga, J. and Amiraslanov, I. R. and Babanly, M. B. and Mamedov, N. T. and Abdullayev, N. A. and Zverev, V. N. and Alfonsov, A. and Kataev, V. and Büchner, B. and Schwier, E. F. and Kumar, S. and Kimura, A. and Petaccia, L. and Di Santo, G. and Vidal, R. C. and Schatz, S. and Kißner, K. and Ünzelmann, M. and Min, C. H. and Moser, Simon and Peixoto, T. R. F. and Reinert, F. and Ernst, A. and Echenique, P. M. and Isaeva, A. and Chulkov, E. V.},
@@ -8530,6 +11407,108 @@
   file = {/Users/wasmer/Zotero/storage/ARZ5YYBV/Parsaeifard and Goedecker - 2022 - Manifolds of quasi-constant SOAP and ACSF fingerpr.pdf}
 }
 
+@article{pasiniFastStableDeeplearning2020,
+  title = {Fast and stable deep-learning predictions of material properties for solid solution alloys},
+  author = {Pasini, Massimiliano Lupo and Li, Ying Wai and Yin, Junqi and Zhang, Jiaxin and Barros, Kipton and Eisenbach, Markus},
+  date = {2020-12},
+  journaltitle = {Journal of Physics: Condensed Matter},
+  shortjournal = {J. Phys.: Condens. Matter},
+  volume = {33},
+  number = {8},
+  pages = {084005},
+  publisher = {{IOP Publishing}},
+  issn = {0953-8984},
+  doi = {10.1088/1361-648X/abcb10},
+  url = {https://dx.doi.org/10.1088/1361-648X/abcb10},
+  urldate = {2023-09-19},
+  abstract = {We present a novel deep learning (DL) approach to produce highly accurate predictions of macroscopic physical properties of solid solution binary alloys and magnetic systems. The major idea is to make use of the correlations between different physical properties in alloy systems to improve the prediction accuracy of neural network (NN) models. We use multitasking NN models to simultaneously predict the total energy, charge density and magnetic moment. These physical properties mutually serve as constraints during the training of the multitasking NN, resulting in more reliable DL models because multiple physics properties are correctly learned by a single model. 
Two binary alloys, copper–gold (CuAu) and iron–platinum (FePt), were studied. Our results show that once the multitasking NN’s are trained, they can estimate the material properties for a specific configuration hundreds of times faster than first-principles density functional theory calculations while retaining comparable accuracy. We used a simple measure based on the root-mean-squared errors to quantify the quality of the NN models, and found that the inclusion of charge density and magnetic moment as physical constraints leads to more stable models that exhibit improved accuracy and reduced uncertainty for the energy predictions.}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2020_Fast and stable deep-learning predictions of material properties for solid.pdf} +} + +@online{pasiniGraphNeuralNetworks2022, + title = {Graph Neural Networks Predict Energetic and Mechanical Properties for Models of Solid Solution Metal Alloy Phases}, + author = {Pasini, Massimiliano Lupo and Jung, G. S. and Irle, Stephan}, + date = {2022-10-25}, + eprinttype = {ChemRxiv}, + doi = {10.26434/chemrxiv-2022-mmx62-v2}, + url = {https://chemrxiv.org/engage/chemrxiv/article-details/6355620acf6de91a611f0799}, + urldate = {2023-09-19}, + abstract = {We developed a graph convolutional neural network (GCNN) to predict the formation energy and the bulk modulus for models of solid solution alloys for various atomic crystal structures and relaxed volumes. We trained the GCNN model on a dataset for nickel-niobium (NiNb) that was generated for simplicity with the embedded atom model (EAM) empirical interatomic potential. 
The dataset has been generated by calculating the formation energy and the bulk modulus as a prototypical elastic property for optimized geometries starting from initial body-centered cubic (BCC), face-centered cubic (FCC), and hexagonal compact packed (HCP) crystal structures, with configurations spanning the possible compositional range for each of the three types of initial crystal structure. Numerical results show that the GCNN model effectively predicts both the formation energy and the bulk modulus as functions of the optimized crystal structure, relaxed volume, and configurational entropy of the model structures for solid solution alloys.},
+  langid = {english},
+  pubstate = {preprint},
+  keywords = {/unread,todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2022_Graph neural networks predict energetic and mechanical properties for models of.pdf}
+}
+
+@article{pasiniMultitaskGraphNeural2022,
+  title = {Multi-task graph neural networks for simultaneous prediction of global and atomic properties in ferromagnetic systems},
+  author = {Lupo Pasini, Massimiliano and Zhang, Pei and Reeve, Samuel Temple and Choi, Jong Youl},
+  date = {2022-05},
+  journaltitle = {Machine Learning: Science and Technology},
+  shortjournal = {Mach. Learn.: Sci. Technol.},
+  volume = {3},
+  number = {2},
+  pages = {025007},
+  publisher = {{IOP Publishing}},
+  issn = {2632-2153},
+  doi = {10.1088/2632-2153/ac6a51},
+  url = {https://dx.doi.org/10.1088/2632-2153/ac6a51},
+  urldate = {2023-09-19},
+  abstract = {We introduce a multi-tasking graph convolutional neural network, HydraGNN, to simultaneously predict both global and atomic physical properties and demonstrate with ferromagnetic materials. 
We train HydraGNN on an open-source ab initio density functional theory (DFT) dataset for iron-platinum with a fixed body centered tetragonal lattice structure and fixed volume to simultaneously predict the mixing enthalpy (a global feature of the system), the atomic charge transfer, and the atomic magnetic moment across configurations that span the entire compositional range. By taking advantage of underlying physical correlations between material properties, multi-task learning (MTL) with HydraGNN provides effective training even with modest amounts of data. Moreover, this is achieved with just one architecture instead of three, as required by single-task learning (STL). The first convolutional layers of the HydraGNN architecture are shared by all learning tasks and extract features common to all material properties. The following layers discriminate the features of the different properties, the results of which are fed to the separate heads of the final layer to produce predictions. Numerical results show that HydraGNN effectively captures the relation between the configurational entropy and the material properties over the entire compositional range. Overall, the accuracy of simultaneous MTL predictions is comparable to the accuracy of the STL predictions. 
In addition, the computational cost of training HydraGNN for MTL is much lower than the original DFT calculations and also lower than training separate STL models for each property.},
+  langid = {english},
+  keywords = {\_tablet,AML,binary systems,charge transfer,DFT,Ferromagnetism,GF2023 workshop,GNN,HydraGNN,library,magnetism,ML,ML-DFT,ML-ESM,multi-task learning,ORNL,prediction of charge transfer,prediction of energy,prediction of magnetic moment,PyTorch,surrogate model,with-code,with-data},
+  file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2022_Multi-task graph neural networks for simultaneous prediction of global and.pdf}
+}
+
+@online{pasiniTransferablePredictionFormation2023,
+  title = {Transferable Prediction of Formation Energy across Lattices of Increasing Size},
+  author = {Lupo Pasini, Massimiliano and Karabin, Mariia and Eisenbach, Markus},
+  date = {2023-06-14},
+  eprinttype = {ChemRxiv},
+  doi = {10.26434/chemrxiv-2023-c14r3},
+  url = {https://chemrxiv.org/engage/chemrxiv/article-details/64877121e64f843f41b07e28},
+  urldate = {2023-09-19},
+  abstract = {In this study, we show the transferability of graph convolutional neural network (GCNN) predictions of the formation energy of solid solution alloys across atomic structures of increasing sizes, which was utilized in the cost-efficient sampling strategy. The GCNN was trained on a nickel-platinum (NiPt) dataset generated with the Large-scale Atomic/Molecular Massively Parallel Simulator (LAMMPS) using the second nearest-neighbor modified embedded-atom method (2NN MEAM) empirical interatomic potential. The dataset has been obtained by optimizing the geometries of initially randomly generated FCC crystal structures and calculating the formation energy, with configurations spanning the whole compositional range. The GCNN was first trained on a lattice of 256 atoms, which accounts well for the short-range interactions. 
Using this data, we predicted the formation energy for lattices of 864 atoms and 2,048 atoms, which resulted in lower-than-expected accuracy due to the long-range interactions present in these larger lattices. We accounted for the long-range interactions by including a small amount of training data representative for those two larger sizes. Using this additional data, the predictions of the GCNN scaled linearly with the size of the lattice. Therefore, our strategy ensured scalability while reducing significantly the computational cost of training on larger lattice sizes.}, + langid = {english}, + pubstate = {preprint}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Pasini et al_2023_Transferable prediction of formation energy across lattices of increasing size.pdf} +} + +@online{PasteurLabsISI2023, + title = {Pasteur {{Labs}} \& {{ISI}} - {{Research}}}, + date = {2023-08-21}, + url = {https://simulation.science/research}, + urldate = {2023-08-21}, + abstract = {Pasteur Labs (and non-profit 'sister' Institute for Simulation Intelligence) build novel technologies that enable human-machine teams to make breakthroughs in physical sciences and to deploy safe AI-driven systems at scale.}, + langid = {english}, + keywords = {Pasteur \& ISI,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/9U89UUGL/research.html} +} + +@article{pastorElectronicDefectsMetal2022, + title = {Electronic Defects in Metal Oxide Photocatalysts}, + author = {Pastor, Ernest and Sachs, Michael and Selim, Shababa and Durrant, James R. and Bakulin, Artem A. 
and Walsh, Aron}, + date = {2022-07}, + journaltitle = {Nature Reviews Materials}, + shortjournal = {Nat Rev Mater}, + volume = {7}, + number = {7}, + pages = {503--521}, + publisher = {{Nature Publishing Group}}, + issn = {2058-8437}, + doi = {10.1038/s41578-022-00433-0}, + url = {https://www.nature.com/articles/s41578-022-00433-0}, + urldate = {2023-08-25}, + abstract = {A deep understanding of defects is essential for the optimization of materials for solar energy conversion. This is particularly true for metal oxide photo(electro)catalysts, which typically feature high concentrations of charged point defects that are electronically active. In photovoltaic materials, except for selected dopants, defects are considered detrimental and should be eliminated to minimize charge recombination. However, photocatalysis is a more complex process in which defects can have an active role, such as in stabilizing charge separation and in mediating rate-limiting catalytic steps. In this Review, we examine the behaviour of electronic defects in metal oxides, paying special attention to the principles that underpin the formation and function of trapped charges in the form of polarons. We focus on how defects alter the electronic structure of metal oxides, statically or transiently upon illumination, and discuss the implications of such changes in light-driven catalytic reactions. 
Finally, we compare oxide defect chemistry with that of new photocatalysts based on carbon nitrides, polymers and metal halide perovskites.}, + issue = {7}, + langid = {english}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Pastor et al_2022_Electronic defects in metal oxide photocatalysts.pdf} +} + @online{patelGoogleWeHave2023, title = {Google "We Have No Moat, And Neither Does OpenAI"}, author = {Patel, Dylan}, @@ -8560,6 +11539,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Pathrudkar et al_2022_Machine learning based prediction of the electronic structure of.pdf;/Users/wasmer/Zotero/storage/EH3KE7NL/PhysRevB.105.html} } +@online{peachImplicitGaussianProcess2023, + title = {Implicit {{Gaussian}} Process Representation of Vector Fields over Arbitrary Latent Manifolds}, + author = {Peach, Robert L. and Vinao-Carl, Matteo and Grossman, Nir and David, Michael and Mallas, Emma and Sharp, David and Malhotra, Paresh A. and Vandergheynst, Pierre and Gosztolai, Adam}, + date = {2023-09-28}, + eprint = {2309.16746}, + eprinttype = {arxiv}, + eprintclass = {physics, q-bio, stat}, + doi = {10.48550/arXiv.2309.16746}, + url = {http://arxiv.org/abs/2309.16746}, + urldate = {2023-10-07}, + abstract = {Gaussian processes (GPs) are popular nonparametric statistical models for learning unknown functions and quantifying the spatiotemporal uncertainty in data. Recent works have extended GPs to model scalar and vector quantities distributed over non-Euclidean domains, including smooth manifolds appearing in numerous fields such as computer vision, dynamical systems, and neuroscience. However, these approaches assume that the manifold underlying the data is known, limiting their practical utility. We introduce RVGP, a generalisation of GPs for learning vector signals over latent Riemannian manifolds. 
Our method uses positional encoding with eigenfunctions of the connection Laplacian, associated with the tangent bundle, readily derived from common graph-based approximation of data. We demonstrate that RVGP possesses global regularity over the manifold, which allows it to super-resolve and inpaint vector fields while preserving singularities. Furthermore, we use RVGP to reconstruct high-density neural dynamics derived from low-density EEG recordings in healthy individuals and Alzheimer's patients. We show that vector field singularities are important disease markers and that their reconstruction leads to a comparable classification accuracy of disease states to high-density recordings. Thus, our method overcomes a significant practical limitation in experimental and clinical applications.}, + pubstate = {preprint}, + keywords = {/unread,Gaussian process,General ML,library,Manifolds,ML,singularities,tensorial target,vector field,vectorial learning target,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Peach et al_2023_Implicit Gaussian process representation of vector fields over arbitrary latent.pdf;/Users/wasmer/Zotero/storage/J7N49F7L/2309.html} +} + @unpublished{pedersonMachineLearningDensity2022, title = {Machine Learning and Density Functional Theory}, author = {Pederson, Ryan and Kalita, Bhupalee and Burke, Kieron}, @@ -8724,6 +11719,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Pilania et al_2020_Data-Based Methods for Materials Design and Discovery.pdf;/Users/wasmer/Zotero/storage/8YQEF8LU/S00981ED1V01Y202001MOP001.html} } +@online{pinheiro3DMoleculeGeneration2023, + title = {{{3D}} Molecule Generation by Denoising Voxel Grids}, + author = {Pinheiro, Pedro O. 
and Rackers, Joshua and Kleinhenz, Joseph and Maser, Michael and Mahmood, Omar and Watkins, Andrew Martin and Ra, Stephen and Sresht, Vishnu and Saremi, Saeed}, + date = {2023-06-12}, + eprint = {2306.07473}, + eprinttype = {arxiv}, + eprintclass = {cs, q-bio}, + doi = {10.48550/arXiv.2306.07473}, + url = {http://arxiv.org/abs/2306.07473}, + urldate = {2023-08-19}, + abstract = {We propose a new score-based approach to generate 3D molecules represented as atomic densities on regular grids. First, we train a denoising neural network that learns to map from a smooth distribution of noisy molecules to the distribution of real molecules. Then, we follow the neural empirical Bayes framework [Saremi and Hyvarinen, 2019] and generate molecules in two steps: (i) sample noisy density grids from a smooth distribution via underdamped Langevin Markov chain Monte Carlo, and (ii) recover the ``clean'' molecule by denoising the noisy grid with a single step. Our method, VoxMol, generates molecules in a fundamentally different way than the current state of the art (i.e., diffusion models applied to atom point clouds). It differs in terms of the data representation, the noise model, the network architecture and the generative modeling algorithm. VoxMol achieves comparable results to state of the art on unconditional 3D molecule generation while being simpler to train and faster to generate molecules.}, + pubstate = {preprint}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Pinheiro et al_2023_3D molecule generation by denoising voxel grids.pdf;/Users/wasmer/Zotero/storage/6CKBVABI/2306.html} +} + @article{pinheiroChoosingRightMolecular2021, title = {Choosing the Right Molecular Machine Learning Potential}, author = {Pinheiro, Max and Ge, Fuchun and Ferré, Nicolas and Dral, Pavlo O. 
and Barbatti, Mario}, @@ -8744,6 +11755,42 @@ file = {/Users/wasmer/Nextcloud/Zotero/Pinheiro et al_2021_Choosing the right molecular machine learning potential.pdf} } +@article{piraudProvidingAIExpertise2023, + title = {Providing {{AI}} Expertise as an Infrastructure in Academia}, + author = {Piraud, Marie and Camero, Andrés and Götz, Markus and Kesselheim, Stefan and Steinbach, Peter and Weigel, Tobias}, + date = {2023-08-11}, + journaltitle = {Patterns}, + shortjournal = {Patterns}, + volume = {4}, + number = {8}, + pages = {100819}, + issn = {2666-3899}, + doi = {10.1016/j.patter.2023.100819}, + url = {https://www.sciencedirect.com/science/article/pii/S2666389923001885}, + urldate = {2023-11-11}, + abstract = {Artificial intelligence (AI) is proliferating and developing faster than any domain scientist can adapt. To support the scientific enterprise in the Helmholtz association, a network of AI specialists has been set up to disseminate AI expertise as an infrastructure among domain scientists. As this effort exposes an evolutionary step in science organization in Germany, this article aspires to describe our setup, goals, and motivations. We comment on past experiences, current developments, and future ideas as we bring our expertise as an infrastructure closer to scientists across our organization. 
We hope that this offers a brief yet insightful view of our activities as well as inspiration for other science organizations.}, + keywords = {AI4Science,FZJ,Helmholtz,Helmholtz AI,ML}, + file = {/Users/wasmer/Nextcloud/Zotero/Piraud et al_2023_Providing AI expertise as an infrastructure in academia.pdf;/Users/wasmer/Zotero/storage/3P8IDCHI/S2666389923001885.html} +} + +@article{pleForceFieldEnhancedNeuralNetwork2023, + title = {Force-{{Field-Enhanced Neural Network Interactions}}: From {{Local Equivariant Embedding}} to {{Atom-in-Molecule}} Properties and Long-Range Effects}, + shorttitle = {Force-{{Field-Enhanced Neural Network Interactions}}}, + author = {Plé, Thomas and Lagardère, Louis and Piquemal, Jean-Philip}, + date = {2023-10-03}, + journaltitle = {Chemical Science}, + shortjournal = {Chem. Sci.}, + publisher = {{The Royal Society of Chemistry}}, + issn = {2041-6539}, + doi = {10.1039/D3SC02581K}, + url = {https://pubs.rsc.org/en/content/articlelanding/2023/sc/d3sc02581k}, + urldate = {2023-10-05}, + abstract = {We introduce FENNIX (Force-Field-Enhanced Neural Network InteraXions), a hybrid approach between machine-learning and force-fields. We leverage state-of-the-art equivariant neural networks to predict local energy contributions and multiple atom-in-molecule properties that are then used as geometry-dependent parameters for physically-motivated energy terms which account for long-range electrostatics and dispersion. Using high-accuracy \textbackslash textit\{ab initio\} data (small organic molecules/dimers), we trained a first version of the model. Exhibiting accurate gas-phase energy predictions, FENNIX is transferable to the condensed phase. It is able to produce stable Molecular Dynamics simulations, including nuclear quantum effects, for water predicting accurate liquid properties. 
The extrapolating power of the hybrid physically-driven machine learning FENNIX approach is exemplified by computing: i) the solvated alanine dipeptide free energy landscape; ii) the reactive dissociation of small molecules.}, + langid = {english}, + keywords = {/unread,AML,biomolecules,chemical reaction,chemistry,equivariant,library,long-range interaction,ML,ML-FF,MLP,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Plé et al_2023_Force-Field-Enhanced Neural Network Interactions.pdf;/Users/wasmer/Zotero/storage/APBQ8PQN/Plé et al. - 2023 - Force-Field-Enhanced Neural Network Interactions .pdf} +} + @online{podryabinkinMLIP3ActiveLearning2023, title = {{{MLIP-3}}: {{Active}} Learning on Atomic Environments with {{Moment Tensor Potentials}}}, shorttitle = {{{MLIP-3}}}, @@ -8902,7 +11949,7 @@ urldate = {2023-06-01}, abstract = {Point clouds are versatile representations of 3D objects and have found widespread application in science and engineering. Many successful deep-learning models have been proposed that use them as input. Some application domains require incorporating exactly physical constraints, including chemical and materials modeling which we focus on in this paper. These constraints include smoothness, and symmetry with respect to translations, rotations, and permutations of identical particles. Most existing architectures in other domains do not fulfill simultaneously all of these requirements and thus are not applicable to atomic-scale simulations. Many of them, however, can be straightforwardly made to incorporate all the physical constraints except for rotational symmetry. We propose a general symmetrization protocol that adds rotational equivariance to any given model while preserving all the other constraints. 
As a demonstration of the potential of this idea, we introduce the Point Edge Transformer (PET) architecture, which is not intrinsically equivariant but achieves state-of-the-art performance on several benchmark datasets of molecules and solids. A-posteriori application of our general protocol makes PET exactly equivariant, with minimal changes to its accuracy. By alleviating the need to explicitly incorporate rotational symmetry within the model, our method bridges the gap between the approaches used in different communities, and simplifies the design of deep-learning schemes for chemical and materials modeling.}, pubstate = {preprint}, - keywords = {\_tablet,AML,equivariant,equivariant alternative,GNN,ML,MPNN,point cloud data,rotational symmetry,simplification,SO(3),symmetrization,transformer,with-code}, + keywords = {\_tablet,alternative approaches,alternative to GNN,AML,approximative equivariance,chemical species scaling problem,collinear,descriptors,equivariant,equivariant alternative,GNN,MACE,ML,MPNN,NequIP,point cloud data,representation learning,rotational symmetry,simplification,SO(3),spin-dependent,spin-polarized,symmetrization,symmetry,transformer,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Pozdnyakov_Ceriotti_2023_Smooth, exact rotational symmetrization for deep learning on point clouds.pdf;/Users/wasmer/Zotero/storage/W32HXDSQ/2305.html} } @@ -8950,7 +11997,7 @@ urldate = {2023-01-20}, abstract = {We present an atomic cluster expansion (ACE) for carbon that improves over available classical and machine learning potentials. The ACE is parameterized from an exhaustive set of important carbon structures at extended volume and energy range, computed using density functional theory (DFT). Rigorous validation reveals that ACE predicts accurately a broad range of properties of both crystalline and amorphous carbon phases while being several orders of magnitude more computationally efficient than available machine learning models. 
We demonstrate the predictive power of ACE on three distinct applications, brittle crack propagation in diamond, evolution of amorphous carbon structures at different densities and quench rates and nucleation and growth of fullerene clusters under high pressure and temperature conditions.}, pubstate = {preprint}, - keywords = {/unread,ACE,carbon,descriptors,MLP,molecular dynamics}, + keywords = {ACE,carbon,descriptors,MLP,molecular dynamics}, file = {/Users/wasmer/Nextcloud/Zotero/Qamar et al_2022_Atomic cluster expansion for quantum-accurate large-scale simulations of carbon.pdf;/Users/wasmer/Zotero/storage/SCVIRYIV/2210.html} } @@ -8963,6 +12010,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Quantum Theory of Magnetism.pdf;/Users/wasmer/Zotero/storage/ULV44ULF/978-3-540-85416-6.html} } +@book{quMachineLearningMolecular2023, + title = {Machine {{Learning}} in {{Molecular Sciences}}}, + editor = {Qu, Chen and Liu, Hanchao}, + date = {2023}, + series = {Challenges and {{Advances}} in {{Computational Chemistry}} and {{Physics}}}, + volume = {36}, + publisher = {{Springer International Publishing}}, + location = {{Cham}}, + doi = {10.1007/978-3-031-37196-7}, + url = {https://link.springer.com/10.1007/978-3-031-37196-7}, + urldate = {2023-10-06}, + isbn = {978-3-031-37195-0 978-3-031-37196-7}, + langid = {english}, + keywords = {\_tablet,AML,biomolecules,chemistry,database generation,educational,GNN,learning material,ML,ML-DFA,ML-DFT,ML-ESM,MLP,organic chemistry,review-of-AML,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/Qu_Liu_2023_Machine Learning in Molecular Sciences.pdf} +} + @online{rackersCrackingQuantumScaling2022, title = {Cracking the {{Quantum Scaling Limit}} with {{Machine Learned Electron Densities}}}, author = {Rackers, Joshua A. 
and Tecot, Lucas and Geiger, Mario and Smidt, Tess E.}, @@ -9029,6 +12093,55 @@ file = {/Users/wasmer/Nextcloud/Zotero/Rader et al_2021_Topological Insulators2.pdf;/Users/wasmer/Zotero/storage/CXY5KUXP/pssb.html} } +@online{radhakrishnanMechanismFeatureLearning2023, + title = {Mechanism of Feature Learning in Deep Fully Connected Networks and Kernel Machines That Recursively Learn Features}, + author = {Radhakrishnan, Adityanarayanan and Beaglehole, Daniel and Pandit, Parthe and Belkin, Mikhail}, + date = {2023-05-09}, + eprint = {2212.13881}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.2212.13881}, + url = {http://arxiv.org/abs/2212.13881}, + urldate = {2023-09-18}, + abstract = {In recent years neural networks have achieved impressive results on many technological and scientific tasks. Yet, the mechanism through which these models automatically select features, or patterns in data, for prediction remains unclear. Identifying such a mechanism is key to advancing performance and interpretability of neural networks and promoting reliable adoption of these models in scientific applications. In this paper, we identify and characterize the mechanism through which deep fully connected neural networks learn features. We posit the Deep Neural Feature Ansatz, which states that neural feature learning occurs by implementing the average gradient outer product to up-weight features strongly related to model output. Our ansatz sheds light on various deep learning phenomena including emergence of spurious features and simplicity biases and how pruning networks can increase performance, the "lottery ticket hypothesis." Moreover, the mechanism identified in our work leads to a backpropagation-free method for feature learning with any machine learning model. 
To demonstrate the effectiveness of this feature learning mechanism, we use it to enable feature learning in classical, non-feature learning models known as kernel machines and show that the resulting models, which we refer to as Recursive Feature Machines, achieve state-of-the-art performance on tabular data.}, + pubstate = {preprint}, + keywords = {Deep learning,General ML,kernel methods,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Radhakrishnan et al_2023_Mechanism of feature learning in deep fully connected networks and kernel.pdf;/Users/wasmer/Zotero/storage/XMIF5REM/2212.html} +} + +@online{radhakrishnanTransferLearningKernel2022, + title = {Transfer {{Learning}} with {{Kernel Methods}}}, + author = {Radhakrishnan, Adityanarayanan and Luyten, Max Ruiz and Prasad, Neha and Uhler, Caroline}, + date = {2022-10-31}, + eprint = {2211.00227}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2211.00227}, + url = {http://arxiv.org/abs/2211.00227}, + urldate = {2023-09-18}, + abstract = {Transfer learning refers to the process of adapting a model trained on a source task to a target task. While kernel methods are conceptually and computationally simple machine learning models that are competitive on a variety of tasks, it has been unclear how to perform transfer learning for kernel methods. In this work, we propose a transfer learning framework for kernel methods by projecting and translating the source model to the target task. We demonstrate the effectiveness of our framework in applications to image classification and virtual drug screening. In particular, we show that transferring modern kernels trained on large-scale image datasets can result in substantial performance increase as compared to using the same kernel trained directly on the target task. In addition, we show that transfer-learned kernels allow a more accurate prediction of the effect of drugs on cancer cell lines. 
For both applications, we identify simple scaling laws that characterize the performance of transfer-learned kernels as a function of the number of target examples. We explain this phenomenon in a simplified linear setting, where we are able to derive the exact scaling laws. By providing a simple and effective transfer learning framework for kernel methods, our work enables kernel methods trained on large datasets to be easily adapted to a variety of downstream target tasks.}, + pubstate = {preprint}, + keywords = {Computer Science - Machine Learning,Deep learning,General ML,kernel methods,ML,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Radhakrishnan et al_2022_Transfer Learning with Kernel Methods.pdf;/Users/wasmer/Zotero/storage/WGXWDYIS/2211.html} +} + +@online{raissiPhysicsInformedDeep2017, + title = {Physics {{Informed Deep Learning}} ({{Part I}}): {{Data-driven Solutions}} of {{Nonlinear Partial Differential Equations}}}, + shorttitle = {Physics {{Informed Deep Learning}} ({{Part I}})}, + author = {Raissi, Maziar and Perdikaris, Paris and Karniadakis, George Em}, + date = {2017-11-28}, + eprint = {1711.10561}, + eprinttype = {arxiv}, + eprintclass = {cs, math, stat}, + doi = {10.48550/arXiv.1711.10561}, + url = {http://arxiv.org/abs/1711.10561}, + urldate = {2023-11-12}, + abstract = {We introduce physics informed neural networks -- neural networks that are trained to solve supervised learning tasks while respecting any given law of physics described by general nonlinear partial differential equations. In this two part treatise, we present our developments in the context of solving two main classes of problems: data-driven solution and data-driven discovery of partial differential equations. Depending on the nature and arrangement of the available data, we devise two distinct classes of algorithms, namely continuous time and discrete time models. 
The resulting neural networks form a new class of data-efficient universal function approximators that naturally encode any underlying physical laws as prior information. In this first part, we demonstrate how these networks can be used to infer solutions to partial differential equations, and obtain physics-informed surrogate models that are fully differentiable with respect to all input coordinates and free parameters.}, + pubstate = {preprint}, + keywords = {/unread,CFD,dynamical systems,dynamics,nonlinear dynamics,original publication,PDE,physics-informed ML,PINN,Python,rec-by-bluegel,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Raissi et al_2017_Physics Informed Deep Learning (Part I).pdf;/Users/wasmer/Zotero/storage/8E8HZ9R4/1711.html} +} + @article{raissiPhysicsinformedNeuralNetworks2019, title = {Physics-Informed Neural Networks: {{A}} Deep Learning Framework for Solving Forward and Inverse Problems Involving Nonlinear Partial Differential Equations}, shorttitle = {Physics-Informed Neural Networks}, @@ -9044,7 +12157,7 @@ urldate = {2022-03-23}, abstract = {We introduce physics-informed neural networks – neural networks that are trained to solve supervised learning tasks while respecting any given laws of physics described by general nonlinear partial differential equations. In this work, we present our developments in the context of solving two main classes of problems: data-driven solution and data-driven discovery of partial differential equations. Depending on the nature and arrangement of the available data, we devise two distinct types of algorithms, namely continuous time and discrete time models. The first type of models forms a new family of data-efficient spatio-temporal function approximators, while the latter type allows the use of arbitrarily accurate implicit Runge–Kutta time stepping schemes with unlimited number of stages. 
The effectiveness of the proposed framework is demonstrated through a collection of classical problems in fluids, quantum mechanics, reaction–diffusion systems, and the propagation of nonlinear shallow-water waves.}, langid = {english}, - keywords = {Nonlinear dynamics,original publication,PINN,Python,rec-by-bluegel,Runge–Kutta methods,with-code}, + keywords = {CFD,dynamical systems,dynamics,nonlinear dynamics,original publication,PDE,physics-informed ML,PINN,Python,rec-by-bluegel,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Raissi et al_2019_Physics-informed neural networks.pdf;/Users/wasmer/Zotero/storage/YR4ICSGZ/S0021999118307125.html} } @@ -9138,6 +12251,34 @@ file = {/Users/wasmer/Nextcloud/Zotero/Ren et al_2022_Ligand Optimization of Exchange Interaction in Co(II) Dimer Single Molecule.pdf;/Users/wasmer/Zotero/storage/NZ36VI4U/acs.jpca.html} } +@online{riebesellMatbenchDiscoveryCan2023, + title = {Matbench {{Discovery}} - {{Can}} Machine Learning Identify Stable Crystals?}, + author = {Riebesell, Janosh and Goodall, Rhys E. A. and Jain, Anubhav and Benner, Philipp and Persson, Kristin A. and Lee, Alpha A.}, + date = {2023-06-20}, + url = {https://janosh.github.io/matbench-discovery}, + urldate = {2023-08-29}, + abstract = {We present a new machine learning (ML) evaluation framework for materials stability predictions named Matbench Discovery. Our task closely simulates the deployment of ML energy models in a high-throughput search for stable inorganic crystals. It is accompanied by an interactive leaderboard and a Python package for easy ingestion of our training/test sets into future model submissions. To answer the question which ML methodology performs best at materials discovery, we explore a wide variety of models. Our initial selection ranges from random forests to GNNs, from one-shot predictors to iterative Bayesian optimizers and universal interatomic potentials (UIP) that closely emulate DFT. 
We find UIPs to be in a class of their own, achieving the highest F1 scores and discovery acceleration factors (DAF) of more than 3, i.e. 3x more stable structures found compared to dummy selection in our already enriched search space. We also identify a sharp disconnect between commonly used regression metrics and more task-relevant classification metrics. CGCNN and MEGNet are worse than dummy regressors, but substantially better than dummy classifiers, suggesting that the field overemphasizes the wrong performance indicators. Our results highlight the need to optimize metrics that measure true stability hit rate improvements and provide valuable insights for maintainers of high throughput materials databases by demonstrating that these models have matured enough to play a vital role as pre-filtering steps to effectively allocate compute budget for DFT relaxations.}, + langid = {english}, + keywords = {AML,Bayesian optimization,benchmark dataset,benchmarking,CGCNN,CHGNet,convex hull,Database,GNN,inorganic materials,library,M3GNet,MatBench,materials,materials project,MEGNet,ML,MLP,platform,todo-tagging,universal potential,voronoi descriptor,with-code,with-data}, + file = {/Users/wasmer/Zotero/storage/Z9GXZ7NV/preprint.html} +} + +@inproceedings{riedelEnablingHyperparameterTuningAI2023, + title = {Enabling {{Hyperparameter-Tuning}} of {{AI Models}} for {{Healthcare}} Using the {{CoE RAISE Unique AI Framework}} for {{HPC}}}, + booktitle = {2023 46th {{MIPRO ICT}} and {{Electronics Convention}} ({{MIPRO}})}, + author = {Riedel, M. and Barakat, C. and Fritsch, S. and Aach, M. and Busch, J. and Lintermann, A. and Schuppert, A. and Brynjólfsson, S. and Neukirchen, H. 
and Book, M.},
+  date = {2023-05},
+  pages = {435--440},
+  issn = {2623-8764},
+  doi = {10.23919/MIPRO57284.2023.10159755},
+  url = {https://ieeexplore.ieee.org/abstract/document/10159755},
+  urldate = {2023-11-11},
+  abstract = {The European Center of Excellence in Exascale Computing “Research on AI- and Simulation-Based Engineering at Exascale” (CoE RAISE) is a project funded by the European Commission. One of its central goals is to develop a Unique AI Framework (UAIF) that simplifies the development of AI models on cutting-edge supercomputers. However, those supercomputers’ High-Performance Computing (HPC) environments require the knowledge of many low-level modules that all need to work together in different software versions (e.g., TensorFlow, Python, NCCL, PyTorch) and various concrete supercomputer hardware deployments (e.g., JUWELS, JURECA, DEEP, JUPITER and other EuroHPC Joint Undertaking HPC resources). This paper will describe our analyzed complex challenges for AI researchers using those environments and explain how to overcome them using the UAIF. In addition, it will show the benefits of using the UAIF hypertuning capability to make AI models better (i.e., better parameters) and faster by using HPC. Also, to demonstrate that the UAIF approach is indeed simple, we describe the adoption of selected UAIF building blocks by healthcare applications. The examples include AI models for the Acute Respiratory Distress Syndrome (ARDS). 
Finally, we highlight other AI models of use cases that co-designed the UAIF.}, + eventtitle = {2023 46th {{MIPRO ICT}} and {{Electronics Convention}} ({{MIPRO}})}, + keywords = {AI4Science,artificial intelligence,CEA,CFD,CoE,containerization,distributed computing,EuroHPC,FZJ,Horovod,HPC,HPC software,hybrid AI/simulation,JSC,library,ML,Optuna,Parallel computing,PyTorch,RSE,simulation,software framework,software infrastructure,supercomputing,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Riedel et al_2023_Enabling Hyperparameter-Tuning of AI Models for Healthcare using the CoE RAISE2.pdf;/Users/wasmer/Zotero/storage/EYG2J5NW/10159755.html} +} + @online{rinaldiNoncollinearMagneticAtomic2023, title = {Non-Collinear {{Magnetic Atomic Cluster Expansion}} for {{Iron}}}, author = {Rinaldi, Matteo and Mrovec, Matous and Bochkarev, Anton and Lysogorskiy, Yury and Drautz, Ralf}, @@ -9147,7 +12288,7 @@ abstract = {The Atomic Cluster Expansion (ACE) provides a formally complete basis for the local atomic environment. ACE is not limited to representing energies as a function of atomic positions and chemical species, but can be generalized to vectorial or tensorial properties and to incorporate further degrees of freedom (DOF). This is crucial for magnetic materials with potential energy surfaces that depend on atomic positions and atomic magnetic moments simultaneously. In this work, we employ the ACE formalism to develop a non-collinear magnetic ACE parametrization for the prototypical magnetic element Fe. The model is trained on a broad range of collinear and non-collinear magnetic structures calculated using spin density functional theory. 
We demonstrate that the non-collinear magnetic ACE is able to reproduce not only ground state properties of various magnetic phases of Fe but also the magnetic and lattice excitations that are essential for a correct description of the finite temperature behavior and properties of crystal defects.}, langid = {english}, organization = {{arXiv.org}}, - keywords = {/unread}, + keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Rinaldi et al_2023_Non-collinear Magnetic Atomic Cluster Expansion for Iron.pdf} } @@ -9249,6 +12390,77 @@ file = {/Users/wasmer/Nextcloud/Zotero/Rønne et al_2022_Atomistic structure search using local surrogate model.pdf} } +@online{rossignolMachineLearningAssistedConstructionTernary2023, + title = {Machine-{{Learning-Assisted Construction}} of {{Ternary Convex Hull Diagrams}}}, + author = {Rossignol, Hugo and Minotakis, Michail and Cobelli, Matteo and Sanvito, Stefano}, + date = {2023-08-30}, + eprint = {2308.15907}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2308.15907}, + url = {http://arxiv.org/abs/2308.15907}, + urldate = {2023-09-22}, + abstract = {In the search for novel intermetallic ternary alloys, much of the effort goes into performing a large number of ab-initio calculations covering a wide range of compositions and structures. These are essential to build a reliable convex hull diagram. While density functional theory (DFT) provides accurate predictions for many systems, its computational overheads set a throughput limit on the number of hypothetical phases that can be probed. Here, we demonstrate how an ensemble of machine-learning spectral neighbor-analysis potentials (SNAPs) can be integrated into a workflow for the construction of accurate ternary convex hull diagrams, highlighting regions fertile for materials discovery. Our workflow relies on using available binary-alloy data both to train the SNAP models and to create prototypes for ternary phases. 
From the prototype structures, all unique ternary decorations are created and used to form a pool of candidate compounds. The SNAPs are then used to pre-relax the structures and screen the most favourable prototypes, before using DFT to build the final phase diagram. As constructed, the proposed workflow relies on no extra first-principles data to train the machine-learning surrogate model and yields a DFT-level accurate convex hull. We demonstrate its efficacy by investigating the Cu-Ag-Au and Mo-Ta-W ternary systems.}, + pubstate = {preprint}, + keywords = {/unread,Condensed Matter - Materials Science}, + file = {/Users/wasmer/Zotero/storage/JDTKMCIM/Rossignol et al. - 2023 - Machine-Learning-Assisted Construction of Ternary .pdf;/Users/wasmer/Zotero/storage/2Z9WD4PH/2308.html} +} + +@article{rossLargescaleChemicalLanguage2022, + title = {Large-Scale Chemical Language Representations Capture Molecular Structure and Properties}, + author = {Ross, Jerret and Belgodere, Brian and Chenthamarakshan, Vijil and Padhi, Inkit and Mroueh, Youssef and Das, Payel}, + date = {2022-12}, + journaltitle = {Nature Machine Intelligence}, + shortjournal = {Nat Mach Intell}, + volume = {4}, + number = {12}, + pages = {1256--1264}, + publisher = {{Nature Publishing Group}}, + issn = {2522-5839}, + doi = {10.1038/s42256-022-00580-7}, + url = {https://www.nature.com/articles/s42256-022-00580-7}, + urldate = {2023-10-08}, + abstract = {Models based on machine learning can enable accurate and fast molecular property predictions, which is of interest in drug discovery and material design. Various supervised machine learning models have demonstrated promising performance, but the vast chemical space and the limited availability of property labels make supervised learning challenging. Recently, unsupervised transformer-based language models pretrained on a large unlabelled corpus have produced state-of-the-art results in many downstream natural language processing tasks. 
Inspired by this development, we present molecular embeddings obtained by training an efficient transformer encoder model, MoLFormer, which uses rotary positional embeddings. This model employs a linear attention mechanism, coupled with highly distributed training, on SMILES sequences of 1.1 billion unlabelled molecules from the PubChem and ZINC datasets. We show that the learned molecular representation outperforms existing baselines, including supervised and self-supervised graph neural networks and language models, on several downstream tasks from ten benchmark datasets. They perform competitively on two others. Further analyses, specifically through the lens of attention, demonstrate that MoLFormer trained on chemical SMILES indeed learns the spatial relationships between atoms within a molecule. These results provide encouraging evidence that large-scale molecular language models can capture sufficient chemical and structural information to predict various distinct molecular properties, including quantum-chemical properties.},
+  issue = {12},
+  langid = {english},
+  keywords = {/unread,alternative approaches,AML,attention,benchmarking,chemistry,fine-tuning,IBM,large dataset,library,linear attention,LLM,masked language model,ML,MoLFormer,multi-target prediction,organic chemistry,pretrained models,RoPE,rotary embedding,small organic molecules,transformer,with-code},
+  file = {/Users/wasmer/Nextcloud/Zotero/Ross et al_2022_Large-scale chemical language representations capture molecular structure and.pdf}
+}
+
+@online{ruheGeometricCliffordAlgebra2023,
+  title = {Geometric {{Clifford Algebra Networks}}},
+  author = {Ruhe, David and Gupta, Jayesh K. 
and family=Keninck, given=Steven, prefix=de, useprefix=true and Welling, Max and Brandstetter, Johannes}, + date = {2023-05-29}, + eprint = {2302.06594}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2302.06594}, + url = {http://arxiv.org/abs/2302.06594}, + urldate = {2023-08-22}, + abstract = {We propose Geometric Clifford Algebra Networks (GCANs) for modeling dynamical systems. GCANs are based on symmetry group transformations using geometric (Clifford) algebras. We first review the quintessence of modern (plane-based) geometric algebra, which builds on isometries encoded as elements of the \$\textbackslash mathrm\{Pin\}(p,q,r)\$ group. We then propose the concept of group action layers, which linearly combine object transformations using pre-specified group actions. Together with a new activation and normalization scheme, these layers serve as adjustable \$\textbackslash textit\{geometric templates\}\$ that can be refined via gradient descent. Theoretical advantages are strongly reflected in the modeling of three-dimensional rigid body transformations as well as large-scale fluid dynamics simulations, showing significantly improved performance over traditional methods.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Ruhe et al_2023_Geometric Clifford Algebra Networks.pdf;/Users/wasmer/Zotero/storage/66MRFXSJ/2302.html} +} + +@article{rungeDensityFunctionalTheoryTimeDependent1984, + title = {Density-{{Functional Theory}} for {{Time-Dependent Systems}}}, + author = {Runge, Erich and Gross, E. K. U.}, + date = {1984-03-19}, + journaltitle = {Physical Review Letters}, + shortjournal = {Phys. Rev. 
Lett.}, + volume = {52}, + number = {12}, + pages = {997--1000}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevLett.52.997}, + url = {https://link.aps.org/doi/10.1103/PhysRevLett.52.997}, + urldate = {2023-09-21}, + abstract = {A density-functional formalism comparable to the Hohenberg-Kohn-Sham theory of the ground state is developed for arbitrary time-dependent systems. It is proven that the single-particle potential v(→rt) leading to a given v-representable density n(→rt) is uniquely determined so that the corresponding map v→n is invertible. On the basis of this theorem, three schemes are derived to calculate the density: a set of hydrodynamical equations, a stationary action principle, and an effective single-particle Schrödinger equation.}, + keywords = {/unread,DFT theory,original publication,TDDFT}, + file = {/Users/wasmer/Zotero/storage/UMXX5WSX/Runge and Gross - 1984 - Density-Functional Theory for Time-Dependent Syste.pdf;/Users/wasmer/Zotero/storage/7869ER2S/PhysRevLett.52.html} +} + @article{ruppFastAccurateModeling2012, title = {Fast and {{Accurate Modeling}} of {{Molecular Atomization Energies}} with {{Machine Learning}}}, author = {Rupp, Matthias and Tkatchenko, Alexandre and Müller, Klaus-Robert and family=Lilienfeld, given=O. 
Anatole, prefix=von, useprefix=true}, @@ -9335,6 +12547,32 @@ file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann et al_2022_The AiiDA-Spirit Plugin for Automated Spin-Dynamics Simulations and Multi-Scale.pdf} } +@online{russmannDensityfunctionalDescriptionMaterials2023, + title = {Density-Functional Description of Materials for Topological Qubits and Superconducting Spintronics}, + author = {Rüßmann, Philipp and Silva, David Antognini and Hemmati, Mohammad and Klepetsanis, Ilias and Trauzettel, Björn and Mavropoulos, Phivos and Blügel, Stefan}, + date = {2023-08-14}, + eprint = {2308.07383}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2308.07383}, + url = {http://arxiv.org/abs/2308.07383}, + urldate = {2023-08-19}, + abstract = {Interfacing superconductors with magnetic or topological materials offers a playground where novel phenomena like topological superconductivity, Majorana zero modes, or superconducting spintronics are emerging. In this work, we discuss recent developments in the Kohn-Sham Bogoliubov-de Gennes method, which allows to perform material-specific simulations of complex superconducting heterostructures on the basis of density functional theory. As a model system we study magnetically-doped Pb. In our analysis we focus on the interplay of magnetism and superconductivity. This combination leads to Yu-Shiba-Rusinov (YSR) in-gap bound states at magnetic defects and the breakdown of superconductivity at larger impurity concentrations. Moreover, the influence of spin-orbit coupling and on orbital splitting of YSR states as well as the appearance of a triplet component in the order parameter is discussed. 
These effects can be exploited in S/F/S-type devices (S=superconductor, F=ferromagnet) in the field of superconducting spintronics.}, + pubstate = {preprint}, + keywords = {BdG,CPA,defects,DFT,FZJ,impurity embedding,JuKKR,KKR,KS-BdG,MZM,PGI,PGI-1/IAS-1,physics,quantum materials,SOC,spintronics,superconducting spitronics,superconductor,Topological Superconductor,Yu-Shiba-Rusinov}, + file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann et al_2023_Density-functional description of materials for topological qubits and.pdf;/Users/wasmer/Zotero/storage/5NLHWP9G/2308.html} +} + +@unpublished{russmannDensityfunctionalDescriptionMaterials2023a, + title = {Density-Functional Description of Materials for Topological Qubits and Superconducting Spintronics}, + author = {Rüßmann, Philipp}, + date = {2023-08-09}, + url = {https://www.fz-juelich.de/en/pgi/pgi-1/expertise/spintronics-and-quantum-transformation-spin-qx-2023}, + eventtitle = {Spintronics and {{Quantum Transformation}} ({{Spin-QX}} 2023)}, + venue = {{Forschungszentrum Jülich}}, + keywords = {BdG,CPA,defects,DFT,FZJ,impurity embedding,JuKKR,KKR,KS-BdG,MZM,PGI,PGI-1/IAS-1,physics,quantum materials,SOC,spintronics,superconducting spitronics,superconductor,Topological Superconductor,Yu-Shiba-Rusinov} +} + @article{russmannInitioTheoryFourierTransformed2021, title = {Ab {{Initio Theory}} of {{Fourier-Transformed Quasiparticle Interference Maps}} and {{Application}} to the {{Topological Insulator Bi2Te3}}}, author = {Rüßmann, Philipp and Mavropoulos, Phivos and Blügel, Stefan}, @@ -9353,6 +12591,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann et al_2021_Ab Initio Theory of Fourier-Transformed Quasiparticle Interference Maps and.pdf} } +@online{russmannInterorbitalCooperPairing2023, + title = {Inter-Orbital {{Cooper}} Pairing at Finite Energies in {{Rashba}} Surface States}, + author = {Rüßmann, Philipp and Bahari, Masoud and Blügel, Stefan and Trauzettel, Björn}, + date = {2023-07-26}, + eprint = {2307.13990}, + eprinttype 
= {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2307.13990}, + url = {http://arxiv.org/abs/2307.13990}, + urldate = {2023-08-19}, + abstract = {Multi-band effects in hybrid structures provide a rich playground for unconventional superconductivity. We combine two complementary approaches based on density-functional theory (DFT) and effective low-energy model theory in order to investigate the proximity effect in a Rashba surface state in contact to an \$s\$-wave superconductor. We discuss these synergistic approaches and combine the effective model and DFT analysis at the example of a Au/Al heterostructure. This allows us to predict finite-energy superconducting pairing due to the interplay of the Rashba surface state of Au, and hybridization with the electronic structure of superconducting Al. We investigate the nature of the induced superconducting pairing and quantify its mixed singlet-triplet character. Our findings demonstrate general recipes to explore real material systems that exhibit inter-orbital pairing away from the Fermi energy.}, + pubstate = {preprint}, + keywords = {/unread,BdG,DFT,FZJ,heterostructures,juKKR,KKR,Mat4QIT,mesoscopic,MZM,PGI,PGI-1/IAS-1,physics,quantum materials,Rashba effect,S-wave Superconductors,SOC,superconductor}, + file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann et al_2023_Inter-orbital Cooper pairing at finite energies in Rashba surface states.pdf;/Users/wasmer/Zotero/storage/THRDSW2F/2307.html} +} + +@online{russmannProximityInducedSuperconductivity2022, + title = {Proximity Induced Superconductivity in a Topological Insulator}, + author = {Rüßmann, Philipp and Blügel, Stefan}, + date = {2022-08-30}, + eprint = {2208.14289}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2208.14289}, + url = {http://arxiv.org/abs/2208.14289}, + urldate = {2023-08-19}, + abstract = {Interfacing a topological insulator (TI) with an \$s\$-wave superconductor (SC) is a promising material platform that 
offers the possibility to realize a topological superconductor through which Majorana-based topologically protected qubits can be engineered. In our computational study of the prototypical SC/TI interface between Nb and Bi\$\_2\$Te\$\_3\$, we identify the benefits and possible bottlenecks of this potential Majorana material platform. Bringing Nb in contact with the TI film induces charge doping from the SC to the TI, which shifts the Fermi level into the TI conduction band. For thick TI films, this results in band bending leading to the population of trivial TI quantum-well states at the interface. In the superconducting state, we uncover that the topological surface state experiences a sizable superconducting gap-opening at the SC/TI interface, which is furthermore robust against fluctuations of the Fermi energy. We also show that the trivial interface state is only marginally proximitized, potentially obstructing the realization of Majorana-based qubits in this material platform.}, + pubstate = {preprint}, + keywords = {/unread,PGI-1/IAS-1,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Rüßmann_Blügel_2022_Proximity induced superconductivity in a topological insulator.pdf;/Users/wasmer/Zotero/storage/5Q45YH6R/2208.html} +} + @thesis{russmannSpinScatteringTopologically2018, title = {Spin Scattering of Topologically Protected Electrons at Defects}, author = {Rüßmann, Philipp}, @@ -9381,6 +12651,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Ryczko_2019_Deep learning and density-functional theory.pdf;/Users/wasmer/Zotero/storage/DYNSZ4CL/1811.html;/Users/wasmer/Zotero/storage/NJ67I8R7/PhysRevA.100.html} } +@article{saalMachineLearningMaterials2020, + title = {Machine {{Learning}} in {{Materials Discovery}}: {{Confirmed Predictions}} and {{Their Underlying Approaches}}}, + shorttitle = {Machine {{Learning}} in {{Materials Discovery}}}, + author = {Saal, James E. and Oliynyk, Anton O. 
and Meredig, Bryce}, + date = {2020}, + journaltitle = {Annual Review of Materials Research}, + volume = {50}, + number = {1}, + pages = {49--69}, + doi = {10.1146/annurev-matsci-090319-010954}, + url = {https://doi.org/10.1146/annurev-matsci-090319-010954}, + urldate = {2023-08-19}, + abstract = {The rapidly growing interest in machine learning (ML) for materials discovery has resulted in a large body of published work. However, only a small fraction of these publications includes confirmation of ML predictions, either via experiment or via physics-based simulations. In this review, we first identify the core components common to materials informatics discovery pipelines, such as training data, choice of ML algorithm, and measurement of model performance. Then we discuss some prominent examples of validated ML-driven materials discovery across a wide variety of materials classes, with special attention to methodological considerations and advances. Across these case studies, we identify several common themes, such as the use of domain knowledge to inform ML models.}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Saal et al_2020_Machine Learning in Materials Discovery.pdf} +} + @article{saalMaterialsDesignDiscovery2013, title = {Materials {{Design}} and {{Discovery}} with {{High-Throughput Density Functional Theory}}: {{The Open Quantum Materials Database}} ({{OQMD}})}, shorttitle = {Materials {{Design}} and {{Discovery}} with {{High-Throughput Density Functional Theory}}}, @@ -9400,6 +12687,26 @@ file = {/Users/wasmer/Nextcloud/Zotero/Saal et al_2013_Materials Design and Discovery with High-Throughput Density Functional Theory.pdf} } +@article{saalMaterialsDesignDiscovery2013a, + title = {Materials {{Design}} and {{Discovery}} with {{High-Throughput Density Functional Theory}}: {{The Open Quantum Materials Database}} ({{OQMD}})}, + shorttitle = {Materials {{Design}} and {{Discovery}} with {{High-Throughput Density 
Functional Theory}}}, + author = {Saal, James E. and Kirklin, Scott and Aykol, Muratahan and Meredig, Bryce and Wolverton, C.}, + date = {2013-11-01}, + journaltitle = {JOM}, + shortjournal = {JOM}, + volume = {65}, + number = {11}, + pages = {1501--1509}, + issn = {1543-1851}, + doi = {10.1007/s11837-013-0755-4}, + url = {https://doi.org/10.1007/s11837-013-0755-4}, + urldate = {2023-08-19}, + abstract = {High-throughput density functional theory (HT DFT) is fast becoming a powerful tool for accelerating materials design and discovery by the amassing tens and even hundreds of thousands of DFT calculations in large databases. Complex materials problems can be approached much more efficiently and broadly through the sheer quantity of structures and chemistries available in such databases. Our HT DFT database, the Open Quantum Materials Database (OQMD), contains over 200,000 DFT calculated crystal structures and will be freely available for public use at http://oqmd.org. In this review, we describe the OQMD and its use in five materials problems, spanning a wide range of applications and materials types: (I) Li-air battery combination catalyst/electrodes, (II) Li-ion battery anodes, (III) Li-ion battery cathode coatings reactive with HF, (IV) Mg-alloy long-period stacking ordered (LPSO) strengthening precipitates, and (V) training a machine learning model to predict new stable ternary compounds.}, + langid = {english}, + keywords = {/unread,Citrine Informatics,DFT,electrochemistry,High-throughput,HT-DFT,Li-ion battery,materials database,materials discovery,OQMD,original publication,ternary systems,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Saal et al_2013_Materials Design and Discovery with High-Throughput Density Functional Theory2.pdf} +} + @article{sadeghiMetricsMeasuringDistances2013, title = {Metrics for Measuring Distances in Configuration Spaces}, author = {Sadeghi, Ali and Ghasemi, S. 
Alireza and Schaefer, Bastian and Mohr, Stephan and Lill, Markus A. and Goedecker, Stefan}, @@ -9435,6 +12742,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Salzbrenner et al_2023_Developments and Further Applications of Ephemeral Data Derived Potentials.pdf;/Users/wasmer/Zotero/storage/FAY5AQLM/2306.html} } +@article{samarakoonMachinelearningassistedInsightSpin2020, + title = {Machine-Learning-Assisted Insight into Spin Ice {{Dy2Ti2O7}}}, + author = {Samarakoon, Anjana M. and Barros, Kipton and Li, Ying Wai and Eisenbach, Markus and Zhang, Qiang and Ye, Feng and Sharma, V. and Dun, Z. L. and Zhou, Haidong and Grigera, Santiago A. and Batista, Cristian D. and Tennant, D. Alan}, + date = {2020-02-14}, + journaltitle = {Nature Communications}, + shortjournal = {Nat Commun}, + volume = {11}, + number = {1}, + pages = {892}, + publisher = {{Nature Publishing Group}}, + issn = {2041-1723}, + doi = {10.1038/s41467-020-14660-y}, + url = {https://www.nature.com/articles/s41467-020-14660-y}, + urldate = {2023-09-19}, + abstract = {Complex behavior poses challenges in extracting models from experiment. An example is spin liquid formation in frustrated magnets like Dy2Ti2O7. Understanding has been hindered by issues including disorder, glass formation, and interpretation of scattering data. Here, we use an automated capability to extract model Hamiltonians from data, and to identify different magnetic regimes. This involves training an autoencoder to learn a compressed representation of three-dimensional diffuse scattering, over a wide range of spin Hamiltonians. The autoencoder finds optimal matches according to scattering and heat capacity data and provides confidence intervals. Validation tests indicate that our optimal Hamiltonian accurately predicts temperature and field dependence of both magnetic structure and magnetization, as well as glass formation and irreversibility in Dy2Ti2O7. 
The autoencoder can also categorize different magnetic behaviors and eliminate background noise and artifacts in raw data. Our methodology is readily applicable to other materials and types of scattering problems.}, + issue = {1}, + langid = {english}, + keywords = {/unread,experimental,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Samarakoon et al_2020_Machine-learning-assisted insight into spin ice Dy2Ti2O7.pdf} +} + @unpublished{samuelMachineLearningPipelines2020, title = {Machine {{Learning Pipelines}}: {{Provenance}}, {{Reproducibility}} and {{FAIR Data Principles}}}, shorttitle = {Machine {{Learning Pipelines}}}, @@ -9502,10 +12830,42 @@ abstract = {This paper introduces a new model to learn graph neural networks equivariant to rotations, translations, reflections and permutations called E(n)-Equivariant Graph Neural Networks (EGNNs). In contrast with existing methods, our work does not require computationally expensive higher-order representations in intermediate layers while it still achieves competitive or better performance. In addition, whereas existing methods are limited to equivariance on 3 dimensional spaces, our model is easily scaled to higher-dimensional spaces. 
We demonstrate the effectiveness of our method on dynamical systems modelling, representation learning in graph autoencoders and predicting molecular properties.}, eventtitle = {International {{Conference}} on {{Machine Learning}}}, langid = {english}, - keywords = {EGNN,equivariant,GDL,GNN,ML,NN,original publication,rec-by-bluegel}, + keywords = {EGNN,equivariant,GDL,general ML,GNN,ML,NN,original publication,rec-by-bluegel}, file = {/Users/wasmer/Nextcloud/Zotero/Satorras et al_2021_E(n) Equivariant Graph Neural Networks2.pdf;/Users/wasmer/Zotero/storage/3ATM3ZJA/Satorras et al_2021_E(n) Equivariant Graph Neural Networks.pdf} } +@online{satorrasEquivariantGraphNeural2022, + title = {E(n) {{Equivariant Graph Neural Networks}}}, + author = {Satorras, Victor Garcia and Hoogeboom, Emiel and Welling, Max}, + date = {2022-02-16}, + eprint = {2102.09844}, + eprinttype = {arxiv}, + eprintclass = {cs, stat}, + doi = {10.48550/arXiv.2102.09844}, + url = {http://arxiv.org/abs/2102.09844}, + urldate = {2023-08-22}, + abstract = {This paper introduces a new model to learn graph neural networks equivariant to rotations, translations, reflections and permutations called E(n)-Equivariant Graph Neural Networks (EGNNs). In contrast with existing methods, our work does not require computationally expensive higher-order representations in intermediate layers while it still achieves competitive or better performance. In addition, whereas existing methods are limited to equivariance on 3 dimensional spaces, our model is easily scaled to higher-dimensional spaces. 
We demonstrate the effectiveness of our method on dynamical systems modelling, representation learning in graph autoencoders and predicting molecular properties.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Satorras et al_2022_E(n) Equivariant Graph Neural Networks.pdf;/Users/wasmer/Zotero/storage/TKSFFYBM/2102.html} +} + +@online{satorrasEquivariantNormalizingFlows2022, + title = {E(n) {{Equivariant Normalizing Flows}}}, + author = {Satorras, Victor Garcia and Hoogeboom, Emiel and Fuchs, Fabian B. and Posner, Ingmar and Welling, Max}, + date = {2022-01-14}, + eprint = {2105.09016}, + eprinttype = {arxiv}, + eprintclass = {physics, stat}, + doi = {10.48550/arXiv.2105.09016}, + url = {http://arxiv.org/abs/2105.09016}, + urldate = {2023-08-22}, + abstract = {This paper introduces a generative model equivariant to Euclidean symmetries: E(n) Equivariant Normalizing Flows (E-NFs). To construct E-NFs, we take the discriminative E(n) graph neural networks and integrate them as a differential equation to obtain an invertible equivariant function: a continuous-time normalizing flow. We demonstrate that E-NFs considerably outperform baselines and existing methods from the literature on particle systems such as DW4 and LJ13, and on molecules from QM9 in terms of log-likelihood. To the best of our knowledge, this is the first flow that jointly generates molecule features and positions in 3D.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Satorras et al_2022_E(n) Equivariant Normalizing Flows.pdf;/Users/wasmer/Zotero/storage/8GNMV24K/2105.html} +} + @article{saucedaBIGDMLAccurateQuantum2022, title = {{{BIGDML}}—{{Towards}} Accurate Quantum Machine Learning Force Fields for Materials}, author = {Sauceda, Huziel E. and Gálvez-González, Luis E. 
and Chmiela, Stefan and Paz-Borbón, Lauro Oliver and Müller, Klaus-Robert and Tkatchenko, Alexandre}, @@ -9871,6 +13231,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Schütt et al_2019_Unifying machine learning and quantum chemistry with a deep neural network for.pdf;/Users/wasmer/Zotero/storage/ADRZDHRZ/s41467-019-12875-2.html} } +@article{schweidtmannGraphNeuralNetworks2020, + title = {Graph {{Neural Networks}} for {{Prediction}} of {{Fuel Ignition Quality}}}, + author = {Schweidtmann, Artur M. and Rittig, Jan G. and König, Andrea and Grohe, Martin and Mitsos, Alexander and Dahmen, Manuel}, + date = {2020-09-17}, + journaltitle = {Energy \& Fuels}, + shortjournal = {Energy Fuels}, + volume = {34}, + number = {9}, + pages = {11395--11407}, + publisher = {{American Chemical Society}}, + issn = {0887-0624}, + doi = {10.1021/acs.energyfuels.0c01533}, + url = {https://doi.org/10.1021/acs.energyfuels.0c01533}, + urldate = {2023-10-11}, + abstract = {Prediction of combustion-related properties of (oxygenated) hydrocarbons is an important and challenging task for which quantitative structure–property relationship (QSPR) models are frequently employed. Recently, a machine learning method, graph neural networks (GNNs), has shown promising results for the prediction of structure–property relationships. GNNs utilize a graph representation of molecules, where atoms correspond to nodes and bonds to edges containing information about the molecular structure. More specifically, GNNs learn physicochemical properties as a function of the molecular graph in a supervised learning setup using a backpropagation algorithm. This end-to-end learning approach eliminates the need for selection of molecular descriptors or structural groups, as it learns optimal fingerprints through graph convolutions and maps the fingerprints to the physicochemical properties by deep learning. 
We develop GNN models for predicting three fuel ignition quality indicators, i.e., the derived cetane number (DCN), the research octane number (RON), and the motor octane number (MON), of oxygenated and nonoxygenated hydrocarbons. In light of limited experimental data in the order of hundreds, we propose a combination of multitask learning, transfer learning, and ensemble learning. The results show competitive performance of the proposed GNN approach compared to state-of-the-art QSPR models, making it a promising field for future research. The prediction tool is available via a web front-end at www.avt.rwth-aachen.de/gnn.},
+  file = {/Users/wasmer/Zotero/storage/S7ZBZ36U/acs.energyfuels.html}
+}
+
+@inproceedings{sculleyHiddenTechnicalDebt2015,
+  title = {Hidden {{Technical Debt}} in {{Machine Learning Systems}}},
+  booktitle = {Advances in {{Neural Information Processing Systems}}},
+  author = {Sculley, D. and Holt, Gary and Golovin, Daniel and Davydov, Eugene and Phillips, Todd and Ebner, Dietmar and Chaudhary, Vinay and Young, Michael and Crespo, Jean-François and Dennison, Dan},
+  date = {2015},
+  volume = {28},
+  publisher = {{Curran Associates, Inc.}},
+  url = {https://papers.nips.cc/paper_files/paper/2015/hash/86df7dcfd896fcaf2674f757a2463eba-Abstract.html},
+  urldate = {2023-09-01},
+  abstract = {Machine learning offers a fantastically powerful toolkit for building useful complex prediction systems quickly. This paper argues it is dangerous to think of these quick wins as coming for free. Using the software engineering framework of technical debt, we find it is common to incur massive ongoing maintenance costs in real-world ML systems. We explore several ML-specific risk factors to account for in system design. 
These include boundary erosion, entanglement,hidden feedback loops, undeclared consumers, data dependencies, configurationissues, changes in the external world, and a variety of system-level anti-patterns.}, + keywords = {/unread,for introductions,general ML,ML,MLOps,project management,software engineering,technical debt}, + file = {/Users/wasmer/Nextcloud/Zotero/Sculley et al_2015_Hidden Technical Debt in Machine Learning Systems.pdf} +} + @article{sendekMachineLearningModeling2022, title = {Machine {{Learning Modeling}} for {{Accelerated Battery Materials Design}} in the {{Small Data Regime}}}, author = {Sendek, Austin D. and Ransom, Brandi and Cubuk, Ekin D. and Pellouchoud, Lenson A. and Nanda, Jagjit and Reed, Evan J.}, @@ -9889,8 +13281,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Sendek et al_Machine Learning Modeling for Accelerated Battery Materials Design in the Small.pdf;/Users/wasmer/Zotero/storage/55KE647F/aenm.html} } +@online{sevillaComputeTrendsThree2022, + title = {Compute {{Trends Across Three Eras}} of {{Machine Learning}}}, + author = {Sevilla, Jaime and Heim, Lennart and Ho, Anson and Besiroglu, Tamay and Hobbhahn, Marius and Villalobos, Pablo}, + date = {2022-03-09}, + eprint = {2202.05924}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2202.05924}, + url = {http://arxiv.org/abs/2202.05924}, + urldate = {2023-09-19}, + abstract = {Compute, data, and algorithmic advances are the three fundamental factors that guide the progress of modern Machine Learning (ML). In this paper we study trends in the most readily quantified factor - compute. We show that before 2010 training compute grew in line with Moore's law, doubling roughly every 20 months. Since the advent of Deep Learning in the early 2010s, the scaling of training compute has accelerated, doubling approximately every 6 months. In late 2015, a new trend emerged as firms developed large-scale ML models with 10 to 100-fold larger requirements in training compute. 
Based on these observations we split the history of compute in ML into three eras: the Pre Deep Learning Era, the Deep Learning Era and the Large-Scale Era. Overall, our work highlights the fast-growing compute requirements for training advanced ML systems.}, + pubstate = {preprint}, + keywords = {AI,ecological footprint,economics,energy consumption,energy efficiency,environmental impact,for introductions,General ML,history of AI,history of science,HPC,ML,Moore's Law,Our World in Data,policy,supercomputing}, + file = {/Users/wasmer/Nextcloud/Zotero/Sevilla et al_2022_Compute Trends Across Three Eras of Machine Learning.pdf;/Users/wasmer/Zotero/storage/WA244FAW/2202.html} +} + @online{shahPhysicsInformedNeuralNetworks2022, - title = {Physics-{{Informed Neural Networks}} as {{Solvers}} for the {{Time-Dependent Schr}}\textbackslash "odinger {{Equation}}}, + title = {Physics-{{Informed Neural Networks}} as {{Solvers}} for the {{Time-Dependent Schrödinger Equation}}}, author = {Shah, Karan and Stiller, Patrick and Hoffmann, Nico and Cangi, Attila}, date = {2022-10-22}, eprint = {2210.12522}, @@ -9905,6 +13313,20 @@ file = {/Users/wasmer/Nextcloud/Zotero/Shah et al_2022_Physics-Informed Neural Networks as Solvers for the Time-Dependent.pdf;/Users/wasmer/Zotero/storage/NSJSIKTH/2210.html} } +@thesis{shahUncertaintyQuantificationMachine2018, + type = {bathesis}, + title = {Uncertainty {{Quantification}} of {{Machine Learned Density Functionals}}}, + author = {Shah, Karan}, + date = {2018-05}, + institution = {{Georgia Institute of Technology}}, + url = {http://hdl.handle.net/1853/61364}, + urldate = {2023-10-06}, + abstract = {Density Functional Theory(DFT) is one of the most popular and successful methods for quantum mechanical simulations of matter because of its relatively lower computational costs. While it is formally exact, approximations of eXchange Correlation(XC) functionals have to be made. 
These calculations are highly time consuming and scale poorly with system size. The prospect of combining computer vision and deep learning is a fundamentally new approach to designing these XC functionals. This approach combines the intuitive power of physical insight with the flexibility of machine learning and high-quality training data in order to develop new routes to approximating exchange-correlation energies. A parameterized function is first fit on the data and the resulting residuals are used for bootstrap aggregating via an ensemble of neural networks. This two-stage method provides robust uncertainty quantification on the predicted XC energies and can be automated for many systems without significant manual intervention.}, + langid = {american}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Shah_2018_Uncertainty Quantification of Machine Learned Density Functionals.pdf} +} + @article{shapeevAccurateRepresentationFormation2017, title = {Accurate Representation of Formation Energies of Crystalline Alloys with Many Components}, author = {Shapeev, A.}, @@ -10007,6 +13429,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Simmhan et al_2005_A survey of data provenance in e-science.pdf} } +@book{singhPlanewavesPseudopotentialsLAPW2006, + title = {Planewaves, {{Pseudopotentials}} and the {{LAPW Method}}}, + author = {Singh, David J. and Nordström, Lars}, + date = {2006}, + edition = {2}, + publisher = {{Springer US}}, + doi = {10.1007/978-0-387-29684-5}, + url = {http://link.springer.com/10.1007/978-0-387-29684-5}, + urldate = {2023-10-01}, + abstract = {The first edition of this book, published in 1994, provided an exposition of the LAPW method and its relationship with other electronic structure approaches, especially Car-Parrinello based planewave methods. 
Since publication of that book, the LAPW method has been transformed from a specialized method used mostly by researchers running their own home made versions, to a popular, widely used method, where most users run standard codes to investigate materials of interest to them. This is an exciting development because it opens the door to widespread use of first principles calculations in diverse areas of condensed matter physics and materials science. The positive impact of this on scientific progress is already becoming clear. Also as a result of this trend, the great majority of researchers using the LAPW method are no longer directly involved in the development of LAPW codes. Nonetheless, it remains important to understand how the LAPW method works, what its limitations are, and how its parameters determine the quality and efficiency of calculations. The scientist with an understanding of how the method works has a clear advantage. This edition is an updated and expanded treatment of the LAPW method, including descriptions of key developments in the LAPW method since 1994, such as {$p_{1/2}$} local orbitals, the APW+LO method, LDA+U calculations and non-collinear magnetism, as well as much of the material from the first edition. 
Written at a level accessible to graduate students and researchers familiar with the fundamentals of solid state physics Only book available on this popular methodology}, + isbn = {978-0-387-28780-5}, + langid = {english}, + pagetotal = {134}, + keywords = {DFT,DFT theory,educational,FLAPW,LAPW,learn DFT,magnetism,numerical,physics,textbook}, + file = {/Users/wasmer/Nextcloud/Zotero/2006_Planewaves, Pseudopotentials and the LAPW Method.pdf} +} + @article{singhRareearthBasedHalfHeusler2020, title = {Rare-Earth Based Half-{{Heusler}} Topological Quantum Materials: {{A}} Perspective}, shorttitle = {Rare-Earth Based Half-{{Heusler}} Topological Quantum Materials}, @@ -10026,6 +13465,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Singh et al_2020_Rare-earth based half-Heusler topological quantum materials.pdf;/Users/wasmer/Zotero/storage/K3TVA8HX/Rare-earth-based-half-Heusler-topological-quantum.html} } +@article{singhSelfconsistentfieldKorringaKohnRostokerCoherentpotentialapproximation1991, + title = {Self-Consistent-Field {{Korringa-Kohn-Rostoker}} Coherent-Potential-Approximation Calculations in the Atomic-Sphere Approximation}, + author = {Singh, Prabhakar P. and family=Fontaine, given=Didier, prefix=de, useprefix=true and Gonis, A.}, + date = {1991-10-15}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {44}, + number = {16}, + pages = {8578--8583}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevB.44.8578}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.44.8578}, + urldate = {2023-09-19}, + abstract = {We have extended the recently formulated Korringa-Kohn-Rostoker coherent-potential approximation (KKR-CPA) for the treatment of substitutionally disordered alloys within the KKR atomic-spheres approximation (ASA) to include charge self-consistency. To test the accuracy of the present approach we have carried out self-consistent-field (SCF) KKR-ASA-CPA calculations of Cu-Pd alloys. 
Our results are in good agreement with experimental results and previous SCF-KKR-CPA calculations. We also present a comparison of the electronic structures of the ordered with the disordered Cu-Pd alloys.}, + keywords = {todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Singh et al_1991_Self-consistent-field Korringa-Kohn-Rostoker coherent-potential-approximation.pdf} +} + @article{singraberParallelMultistreamTraining2019, title = {Parallel {{Multistream Training}} of {{High-Dimensional Neural Network Potentials}}}, author = {Singraber, Andreas and Morawietz, Tobias and Behler, Jörg and Dellago, Christoph}, @@ -10045,6 +13502,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Singraber et al_2019_Parallel Multistream Training of High-Dimensional Neural Network Potentials.pdf} } +@online{sinitskiyDeepNeuralNetwork2018, + title = {Deep {{Neural Network Computes Electron Densities}} and {{Energies}} of a {{Large Set}} of {{Organic Molecules Faster}} than {{Density Functional Theory}} ({{DFT}})}, + author = {Sinitskiy, Anton V. and Pande, Vijay S.}, + date = {2018-09-07}, + eprint = {1809.02723}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.1809.02723}, + url = {http://arxiv.org/abs/1809.02723}, + urldate = {2023-10-01}, + abstract = {Density functional theory (DFT) is one of the main methods in Quantum Chemistry that offers an attractive trade off between the cost and accuracy of quantum chemical computations. The electron density plays a key role in DFT. In this work, we explore whether machine learning - more specifically, deep neural networks (DNNs) - can be trained to predict electron densities faster than DFT. First, we choose a practically efficient combination of a DFT functional and a basis set (PBE0/pcS-3) and use it to generate a database of DFT solutions for more than 133,000 organic molecules from a previously published database QM9. Next, we train a DNN to predict electron densities and energies of such molecules. 
The only input to the DNN is an approximate electron density computed with a cheap quantum chemical method in a small basis set (HF/cc-VDZ). We demonstrate that the DNN successfully learns differences in the electron densities arising both from electron correlation and small basis set artifacts in the HF computations. All qualitative features in density differences, including local minima on lone pairs, local maxima on nuclei, toroidal shapes around C-H and C-C bonds, complex shapes around aromatic and cyclopropane rings and CN group, etc. are captured by the DNN. Accuracy of energy predictions by the DNN is \textasciitilde{} 1 kcal/mol, on par with other models reported in the literature, while those models do not predict the electron density. Computations with the DNN, including HF computations, take much less time that DFT computations (by a factor of \textasciitilde 20-30 for most QM9 molecules in the current version, and it is clear how it could be further improved).}, + pubstate = {preprint}, + keywords = {AML,benchmarking,CCSD(T),CNN,delta learning,ML,ML-DFT,ML-ESM,MLP,MLP comparison,molecules,organic chemistry,PBE,prediction of electron density,prediction of energy,QM9,U-net,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Sinitskiy_Pande_2018_Deep Neural Network Computes Electron Densities and Energies of a Large Set of.pdf;/Users/wasmer/Zotero/storage/C263WSGH/1809.html} +} + @article{sivaramanMachinelearnedInteratomicPotentials2020, title = {Machine-Learned Interatomic Potentials by Active Learning: Amorphous and Liquid Hafnium Dioxide}, shorttitle = {Machine-Learned Interatomic Potentials by Active Learning}, @@ -10155,6 +13628,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Sommer et al_2022_3DSC - A New Dataset of Superconductors Including Crystal Structures.pdf;/Users/wasmer/Zotero/storage/JMMVYJCI/2212.html} } +@online{songDeepSpeed4ScienceInitiativeEnabling2023, + title = {{{DeepSpeed4Science Initiative}}: {{Enabling Large-Scale Scientific 
Discovery}} through {{Sophisticated AI System Technologies}}}, + shorttitle = {{{DeepSpeed4Science Initiative}}}, + author = {Song, Shuaiwen Leon and Kruft, Bonnie and Zhang, Minjia and Li, Conglong and Chen, Shiyang and Zhang, Chengming and Tanaka, Masahiro and Wu, Xiaoxia and Rasley, Jeff and Awan, Ammar Ahmad and Holmes, Connor and Cai, Martin and Ghanem, Adam and Zhou, Zhongzhu and He, Yuxiong and Luferenko, Pete and Kumar, Divya and Weyn, Jonathan and Zhang, Ruixiong and Klocek, Sylwester and Vragov, Volodymyr and AlQuraishi, Mohammed and Ahdritz, Gustaf and Floristean, Christina and Negri, Cristina and Kotamarthi, Rao and Vishwanath, Venkatram and Ramanathan, Arvind and Foreman, Sam and Hippe, Kyle and Arcomano, Troy and Maulik, Romit and Zvyagin, Maxim and Brace, Alexander and Zhang, Bin and Bohorquez, Cindy Orozco and Clyde, Austin and Kale, Bharat and Perez-Rivera, Danilo and Ma, Heng and Mann, Carla M. and Irvin, Michael and Pauloski, J. Gregory and Ward, Logan and Hayot, Valerie and Emani, Murali and Xie, Zhen and Lin, Diangen and Shukla, Maulik and Foster, Ian and Davis, James J. and Papka, Michael E. and Brettin, Thomas and Balaprakash, Prasanna and Tourassi, Gina and Gounley, John and Hanson, Heidi and Potok, Thomas E. 
and Pasini, Massimiliano Lupo and Evans, Kate and Lu, Dan and Lunga, Dalton and Yin, Junqi and Dash, Sajal and Wang, Feiyi and Shankar, Mallikarjun and Lyngaas, Isaac and Wang, Xiao and Cong, Guojing and Zhang, Pei and Fan, Ming and Liu, Siyan and Hoisie, Adolfy and Yoo, Shinjae and Ren, Yihui and Tang, William and Felker, Kyle and Svyatkovskiy, Alexey and Liu, Hang and Aji, Ashwin and Dalton, Angela and Schulte, Michael and Schulz, Karl and Deng, Yuntian and Nie, Weili and Romero, Josh and Dallago, Christian and Vahdat, Arash and Xiao, Chaowei and Gibbs, Thomas and Anandkumar, Anima and Stevens, Rick}, + date = {2023-10-11}, + eprint = {2310.04610}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2310.04610}, + url = {http://arxiv.org/abs/2310.04610}, + urldate = {2023-11-05}, + abstract = {In the upcoming decade, deep learning may revolutionize the natural sciences, enhancing our capacity to model and predict natural occurrences. This could herald a new era of scientific exploration, bringing significant advancements across sectors from drug development to renewable energy. To answer this call, we present DeepSpeed4Science initiative (deepspeed4science.ai) which aims to build unique capabilities through AI system technology innovations to help domain experts to unlock today's biggest science mysteries. By leveraging DeepSpeed's current technology pillars (training, inference and compression) as base technology enablers, DeepSpeed4Science will create a new set of AI system technologies tailored for accelerating scientific discoveries by addressing their unique complexity beyond the common technical approaches used for accelerating generic large language models (LLMs). 
In this paper, we showcase the early progress we made with DeepSpeed4Science in addressing two of the critical system challenges in structural biology research.}, + pubstate = {preprint}, + keywords = {AI4Science,AML,Argonne National Laboratory,biomolecules,drug discovery,foundation models,GPU,HPC,library,LLM,materials discovery,Microsoft Research,ML,NVIDIA,performance optimization,Quantum chemistry,surrogate model,transformer,white paper,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Song et al_2023_DeepSpeed4Science Initiative.pdf;/Users/wasmer/Zotero/storage/9XE4R4E2/2310.html} +} + @inproceedings{souzaProvenanceDataMachine2019, title = {Provenance {{Data}} in the {{Machine Learning Lifecycle}} in {{Computational Science}} and {{Engineering}}}, booktitle = {2019 {{IEEE}}/{{ACM Workflows}} in {{Support}} of {{Large-Scale Science}} ({{WORKS}})}, @@ -10168,6 +13658,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Souza et al_2019_Provenance Data in the Machine Learning Lifecycle in Computational Science and.pdf;/Users/wasmer/Zotero/storage/NXAA6T76/8943505.html} } +@article{sovenCoherentPotentialModelSubstitutional1967, + title = {Coherent-{{Potential Model}} of {{Substitutional Disordered Alloys}}}, + author = {Soven, Paul}, + date = {1967-04-15}, + journaltitle = {Physical Review}, + shortjournal = {Phys. Rev.}, + volume = {156}, + number = {3}, + pages = {809--813}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRev.156.809}, + url = {https://link.aps.org/doi/10.1103/PhysRev.156.809}, + urldate = {2023-09-19}, + abstract = {We introduce a model of a substitutional alloy based on the concept of an effective or coherent potential which, when placed on every site of the alloy lattice, will simulate the electronic properties of the actual alloy. The coherent potential is necessarily a complex, energy-dependent quantity. We evaluate the model for the simple case of a one-dimensional alloy of δ-function potentials. 
In order to provide a basis for comparison, as well as to see if a simpler scheme will suffice, we also calculate the spectrum of the same alloy using the average t-matrix approximation introduced by Beeby. On the basis of these results, we conclude that the average t-matrix approximation is not adequate for the description of an actual transition-metal alloy, while the coherent-potential picture will provide a more reasonable facsimile of the density of states in such an alloy.}, + keywords = {CPA,DFT,disordered,KKR,KKR foundations,original publication}, + file = {/Users/wasmer/Nextcloud/Zotero/Soven_1967_Coherent-Potential Model of Substitutional Disordered Alloys.pdf;/Users/wasmer/Zotero/storage/IQJHV65I/PhysRev.156.html} +} + @book{spaldinMagneticMaterialsFundamentals2010, title = {Magnetic {{Materials}}: {{Fundamentals}} and {{Applications}}}, shorttitle = {Magnetic {{Materials}}}, @@ -10185,6 +13693,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Spaldin_2010_Magnetic Materials.pdf;/Users/wasmer/Zotero/storage/3A42DP7U/4C8C2C5DF32C9E8D528E1E8D26381C1F.html} } +@online{speckhardExtrapolationCompleteBasisset2023, + title = {Extrapolation to Complete Basis-Set Limit in Density-Functional Theory by Quantile Random-Forest Models}, + author = {Speckhard, Daniel T. and Carbogno, Christian and Ghiringhelli, Luca and Lubeck, Sven and Scheffler, Matthias and Draxl, Claudia}, + date = {2023-06-01}, + eprint = {2303.14760}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics, stat}, + doi = {10.48550/arXiv.2303.14760}, + url = {http://arxiv.org/abs/2303.14760}, + urldate = {2023-10-11}, + abstract = {The numerical precision of density-functional-theory (DFT) calculations depends on a variety of computational parameters, one of the most critical being the basis-set size. The ultimate precision is reached with an infinitely large basis set, i.e., in the limit of a complete basis set (CBS). 
Our aim in this work is to find a machine-learning model that extrapolates finite basis-size calculations to the CBS limit. We start with a data set of 63 binary solids investigated with two all-electron DFT codes, exciting and FHI-aims, which employ very different types of basis sets. A quantile-random-forest model is used to estimate the total-energy correction with respect to a fully converged calculation as a function of the basis-set size. The random-forest model achieves a symmetric mean absolute percentage error of lower than 25\% for both codes and outperforms previous approaches in the literature. Our approach also provides prediction intervals, which quantify the uncertainty of the models' predictions.}, + pubstate = {preprint}, + keywords = {/unread,DFT,DFT codes comparison,DFT numerics,error estimate,exciting DFT code,FHI-aims,numerical analysis,numerical errors,rec-by-bluegel}, + file = {/Users/wasmer/Nextcloud/Zotero/Speckhard et al_2023_Extrapolation to complete basis-set limit in density-functional theory by.pdf;/Users/wasmer/Zotero/storage/6FSJEP2H/2303.html} +} + @online{spencerBetterFasterFermionic2020, title = {Better, {{Faster Fermionic Neural Networks}}}, author = {Spencer, James S. and Pfau, David and Botev, Aleksandar and Foulkes, W. M. C.}, @@ -10282,6 +13806,41 @@ file = {/Users/wasmer/Nextcloud/Zotero/Stevens et al_2020_AI for Science Report 2020.pdf;/Users/wasmer/Zotero/storage/Q2KH2Y8A/ai-for-science-report-2020.html} } +@article{stocksCompleteSolutionKorringaKohnRostoker1978, + title = {Complete {{Solution}} of the {{Korringa-Kohn-Rostoker Coherent-Potential-Approximation Equations}}: {{Cu-Ni Alloys}}}, + shorttitle = {Complete {{Solution}} of the {{Korringa-Kohn-Rostoker Coherent-Potential-Approximation Equations}}}, + author = {Stocks, G. M. and Temmerman, W. M. and Gyorffy, B. L.}, + date = {1978-07-31}, + journaltitle = {Physical Review Letters}, + shortjournal = {Phys. Rev. 
Lett.}, + volume = {41}, + number = {5}, + pages = {339--343}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevLett.41.339}, + url = {https://link.aps.org/doi/10.1103/PhysRevLett.41.339}, + urldate = {2023-09-19}, + abstract = {We report on calculations of the electronic states in disordered CucNi(1−c) alloys based on a complete solution of the coherent-potential approximation for a muffin-tin model of the alloy potential [Korringa-Kohn-Rostoker coherent-potential-approximation (KKR-CPA)]. The computational effort required is modest on the scale of that involved in bandstructure calculations for many atoms per unit cell. The calculated densities of states are in good agreement with the results of photoemission and other experiments. The adequacy of previous approximate KKR-CPA and averaged t-matrix calculations is discussed.}, + file = {/Users/wasmer/Nextcloud/Zotero/Stocks et al_1978_Complete Solution of the Korringa-Kohn-Rostoker.pdf;/Users/wasmer/Zotero/storage/8KJXURZT/PhysRevLett.41.html} +} + +@online{suSVNetWhereEquivariance2022, + title = {{{SVNet}}: {{Where SO}}(3) {{Equivariance Meets Binarization}} on {{Point Cloud Representation}}}, + shorttitle = {{{SVNet}}}, + author = {Su, Zhuo and Welling, Max and Pietikäinen, Matti and Liu, Li}, + date = {2022-09-20}, + eprint = {2209.05924}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2209.05924}, + url = {http://arxiv.org/abs/2209.05924}, + urldate = {2023-08-22}, + abstract = {Efficiency and robustness are increasingly needed for applications on 3D point clouds, with the ubiquitous use of edge devices in scenarios like autonomous driving and robotics, which often demand real-time and reliable responses. The paper tackles the challenge by designing a general framework to construct 3D learning architectures with SO(3) equivariance and network binarization. 
However, a naive combination of equivariant networks and binarization either causes sub-optimal computational efficiency or geometric ambiguity. We propose to locate both scalar and vector features in our networks to avoid both cases. Precisely, the presence of scalar features makes the major part of the network binarizable, while vector features serve to retain rich structural information and ensure SO(3) equivariance. The proposed approach can be applied to general backbones like PointNet and DGCNN. Meanwhile, experiments on ModelNet40, ShapeNet, and the real-world dataset ScanObjectNN, demonstrated that the method achieves a great trade-off between efficiency, rotation robustness, and accuracy. The codes are available at https://github.com/zhuoinoulu/svnet.}, + pubstate = {preprint}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Su et al_2022_SVNet.pdf;/Users/wasmer/Zotero/storage/YCIPFJTS/2209.html} +} + @article{suttonIdentifyingDomainsApplicability2020, title = {Identifying Domains of Applicability of Machine Learning Models for Materials Science}, author = {Sutton, Christopher and Boley, Mario and Ghiringhelli, Luca M. and Rupp, Matthias and Vreeken, Jilles and Scheffler, Matthias}, @@ -10432,6 +13991,28 @@ file = {/Users/wasmer/Nextcloud/Zotero/Talirz et al_2021_Trends in Atomistic Simulation Software Usage [Article v1.pdf} } +@unpublished{tangiralaNeuralNetworkPredictiveModeling2022, + title = {Neural-{{Network Predictive Modeling}} of {{Physical Properties}} in {{Binary Magnetic}} and {{Non-Magnetic Alloys}}}, + author = {Tangirala, Sairam}, + date = {2022-03-17}, + url = {https://meetings.aps.org/Meeting/MAR22/Session/T32.8}, + urldate = {2023-09-19}, + abstract = {We present a deep learning (DL) approach to reproduce the first principles Density Functional Theory (DFT) based calculations pertaining to macroscopic physical properties of a non-magnetic (CuAu) and a magnetic (FePt) binary alloys. 
In this study, a neural network (NN) is developed and trained using thousands of theoretically possible lattice configurations obtained from the Locally Self-Consistent Multiple Scattering (LSMS) DFT code [1]. The intrinsic physical properties of alloys like composition ratio, unit-cell structure, spatial charge distributions, Coulombic interactions, etc. are inputted into the NN model structured by the “bag-of-bonds” representation [2]. The NN regression model is trained to capture the relationship between intrinsic parameters and the total energy of the alloys. Although NNs are complex and computationally expensive to train, they are flexible and can effectively pick up nonlinear relationships between inputs and outputs. Our results show that the trained NN model is orders-of-magnitude faster than DFT in inferring the total energy with comparable accuracy [3]. This demonstrates the potential of applying the NN formalism in accelerating the computational studies of condensed matter systems. [1] LSMS. Computer software. https://www.osti.gov//servlets/purl/1420087. Vers. 00. USDOE. 1 Dec. 2017. Web. [2] J. Phys. Chem. Lett. 6, 12, 2326–2331 (2015). [3] J. Phys.: Condens. Matter 33, 084005 (2021). 
*ST acknowledges funding from Georgia Gwinnett College through its "Educational and Professional Leave" program}, + eventtitle = {{{APS March Meeting}} 2022}, + venue = {{Chicago}}, + keywords = {/unread,todo-tagging}, + annotation = {Authors: + +Sairam Tangirala (Georgia Gwinnett College) + +Massimiliano L Pasini (Oakridge National Laboratory) + +Markus Eisenbach (Oak Ridge National Lab) + +Ying-Wai Li (Los Alamos National Laboratory)}, + file = {/Users/wasmer/Zotero/storage/SGNGMK25/T32.html} +} + @article{tealeDFTExchangeSharing2022, title = {{{DFT Exchange}}: {{Sharing Perspectives}} on the {{Workhorse}} of {{Quantum Chemistry}} and {{Materials Science}}}, shorttitle = {{{DFT Exchange}}}, @@ -10459,7 +14040,7 @@ urldate = {2023-03-02}, abstract = {Explainable artificial intelligence (XAI) methods are expected to improve trust during human-AI interactions, provide tools for model analysis and extend human understanding of complex problems. Explanation-supervised training allows to improve explanation quality by training self-explaining XAI models on ground truth or human-generated explanations. However, existing explanation methods have limited expressiveness and interoperability due to the fact that only single explanations in form of node and edge importance are generated. To that end we propose the novel multi-explanation graph attention network (MEGAN). Our fully differentiable, attention-based model features multiple explanation channels, which can be chosen independently of the task specifications. We first validate our model on a synthetic graph regression dataset. We show that for the special single explanation case, our model significantly outperforms existing post-hoc and explanation-supervised baseline methods. Furthermore, we demonstrate significant advantages when using two explanations, both in quantitative explanation measures as well as in human interpretability. Finally, we demonstrate our model's capabilities on multiple real-world datasets. 
We find that our model produces sparse high-fidelity explanations consistent with human intuition about those tasks and at the same time matches state-of-the-art graph neural networks in predictive performance, indicating that explanations and accuracy are not necessarily a trade-off.}, pubstate = {preprint}, - keywords = {/unread,AML,GAT,GNN,materials,ML,XAI}, + keywords = {AML,GAT,GNN,materials,ML,XAI}, file = {/Users/wasmer/Nextcloud/Zotero/Teufel et al_2022_MEGAN.pdf;/Users/wasmer/Zotero/storage/4ZA3I5AT/2211.html} } @@ -10472,10 +14053,28 @@ abstract = {Despite the increasing relevance of explainable AI, assessing the quality of explanations remains a challenging issue. Due to the high costs associated with human-subject experiments, various proxy metrics are often used to approximately quantify explanation quality. Generally, one possible interpretation of the quality of an explanation is its inherent value for teaching a related concept to a student. In this work, we extend artificial simulatability studies to the domain of graph neural networks. Instead of costly human trials, we use explanation-supervisable graph neural networks to perform simulatability studies to quantify the inherent usefulness of attributional graph explanations. We perform an extensive ablation study to investigate the conditions under which the proposed analyses are most meaningful. We additionally validate our methods applicability on real-world graph classification and regression datasets. We find that relevant explanations can significantly boost the sample efficiency of graph neural networks and analyze the robustness towards noise and bias in the explanations. 
We believe that the notion of usefulness obtained from our proposed simulatability analysis provides a dimension of explanation quality that is largely orthogonal to the common practice of faithfulness and has great potential to expand the toolbox of explanation quality assessments, specifically for graph explanations.}, langid = {english}, organization = {{arXiv.org}}, - keywords = {/unread}, + keywords = {todo-tagging}, file = {/Users/wasmer/Nextcloud/Zotero/Teufel et al_2023_Quantifying the Intrinsic Usefulness of Attributional Explanations for Graph.pdf} } +@article{tewaryGreenfunctionMethodLattice1973, + title = {Green-Function Method for Lattice Statics}, + author = {Tewary, V.K.}, + date = {1973-11-01}, + journaltitle = {Advances in Physics}, + volume = {22}, + number = {6}, + pages = {757--810}, + publisher = {{Taylor \& Francis}}, + issn = {0001-8732}, + doi = {10.1080/00018737300101389}, + url = {https://doi.org/10.1080/00018737300101389}, + urldate = {2023-08-30}, + abstract = {This article gives a review of the Green-function method for the calculation of the static properties of lattices with point defects. This method, based upon the Born-von Kármán model of a lattice, uses the zero frequency limit of the phonon Green function which gives the static response of the lattice to an applied force. The method is quite general and is applicable to most kind of defects but the attention in this review is restricted to point defects in cubic Bravais lattices. The Green-function method is shown to be formally equivalent to the Kanzaki method but is more powerful and computationally more convenient. For the purpose of illustration the Green-function method has been applied to a vacancy in the Rosenstock-Newell model of a solid. Although the model is physically unrealistic, it has the advantage of yielding results in closed analytic forms which are very useful for qualitative discussions. 
The applications of the Green-function method to some real systems—vacancies and interstitials in f.c.c. and b.c.c. lattices—have also been described. Finally the Green-function method is generalized to account for a regular array of point defects and its application to super lattices of voids and gas interstitials in certain metals is discussed.}, + keywords = {/unread,defects,Green’s Function Method,microstructure,multi-scale Green function (MSGF),multiscale,nanomaterials,point defects,structure relaxation,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Tewary_1973_Green-function method for lattice statics.pdf} +} + @thesis{thiessDevelopmentApplicationMassively2011, title = {Development and Application of a Massively Parallel {{KKR Green}} Function Method for Large Scale Systems}, author = {Thieß, Alexander R. and Blügel, Stefan}, @@ -10659,6 +14258,21 @@ file = {/Users/wasmer/Nextcloud/Zotero/Tokura et al_2019_Magnetic topological insulators.pdf} } +@incollection{TopologicalInsulators2013, + title = {Topological {{Insulators}}}, + booktitle = {Contemporary {{Concepts}} of {{Condensed Matter Science}}}, + date = {2013}, + volume = {6}, + pages = {i}, + publisher = {{Elsevier}}, + doi = {10.1016/B978-0-444-63314-9.00012-3}, + url = {https://linkinghub.elsevier.com/retrieve/pii/B9780444633149000123}, + urldate = {2023-07-12}, + isbn = {978-0-444-63314-9}, + langid = {english}, + keywords = {/unread,learning material,topological,topological insulator} +} + @article{townsendDataDrivenAccelerationCoupledCluster2019, title = {Data-{{Driven Acceleration}} of the {{Coupled-Cluster Singles}} and {{Doubles Iterative Solver}}}, author = {Townsend, Jacob and Vogiatzis, Konstantinos D.}, @@ -10677,6 +14291,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Townsend_Vogiatzis_2019_Data-Driven Acceleration of the Coupled-Cluster Singles and Doubles Iterative.pdf;/Users/wasmer/Zotero/storage/RVTRBAZI/acs.jpclett.html} } +@online{townshendATOM3DTasksMolecules2022, + title = 
{{{ATOM3D}}: {{Tasks On Molecules}} in {{Three Dimensions}}}, + shorttitle = {{{ATOM3D}}}, + author = {Townshend, Raphael J. L. and Vögele, Martin and Suriana, Patricia and Derry, Alexander and Powers, Alexander and Laloudakis, Yianni and Balachandar, Sidhika and Jing, Bowen and Anderson, Brandon and Eismann, Stephan and Kondor, Risi and Altman, Russ B. and Dror, Ron O.}, + date = {2022-01-15}, + eprint = {2012.04035}, + eprinttype = {arxiv}, + eprintclass = {physics, q-bio}, + doi = {10.48550/arXiv.2012.04035}, + url = {http://arxiv.org/abs/2012.04035}, + urldate = {2023-10-05}, + abstract = {Computational methods that operate on three-dimensional molecular structure have the potential to solve important questions in biology and chemistry. In particular, deep neural networks have gained significant attention, but their widespread adoption in the biomolecular domain has been limited by a lack of either systematic performance benchmarks or a unified toolkit for interacting with molecular data. To address this, we present ATOM3D, a collection of both novel and existing benchmark datasets spanning several key classes of biomolecules. We implement several classes of three-dimensional molecular learning methods for each of these tasks and show that they consistently improve performance relative to methods based on one- and two-dimensional representations. The specific choice of architecture proves to be critical for performance, with three-dimensional convolutional networks excelling at tasks involving complex geometries, graph networks performing well on systems requiring detailed positional information, and the more recently developed equivariant networks showing significant promise. Our results indicate that many molecular problems stand to gain from three-dimensional molecular learning, and that there is potential for improvement on many tasks which remain underexplored. 
To lower the barrier to entry and facilitate further developments in the field, we also provide a comprehensive suite of tools for dataset processing, model training, and evaluation in our open-source atom3d Python package. All datasets are available for download from https://www.atom3d.ai .}, + pubstate = {preprint}, + keywords = {AML,benchmark dataset,benchmarking,biomolecules,chemistry,Database,library,ML,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Townshend et al_2022_ATOM3D2.pdf;/Users/wasmer/Zotero/storage/2D2AMNSF/2012.html} +} + @online{tranMethodsComparingUncertainty2020, title = {Methods for Comparing Uncertainty Quantifications for Material Property Predictions}, author = {Tran, Kevin and Neiswanger, Willie and Yoon, Junwoong and Zhang, Qingyang and Xing, Eric and Ulissi, Zachary W.}, @@ -10693,6 +14324,47 @@ file = {/Users/wasmer/Nextcloud/Zotero/Tran et al_2020_Methods for comparing uncertainty quantifications for material property.pdf;/Users/wasmer/Zotero/storage/6RLGREQU/1912.html} } +@article{tranOpenCatalyst20222023, + title = {The {{Open Catalyst}} 2022 ({{OC22}}) {{Dataset}} and {{Challenges}} for {{Oxide Electrocatalysts}}}, + author = {Tran, Richard and Lan, Janice and Shuaibi, Muhammed and Wood, Brandon M. and Goyal, Siddharth and Das, Abhishek and Heras-Domingo, Javier and Kolluru, Adeesh and Rizvi, Ammar and Shoghi, Nima and Sriram, Anuroop and Therrien, Felix and Abed, Jehad and Voznyy, Oleksandr and Sargent, Edward H. and Ulissi, Zachary and Zitnick, C. 
Lawrence}, + date = {2023-03-03}, + journaltitle = {ACS Catalysis}, + shortjournal = {ACS Catal.}, + volume = {13}, + number = {5}, + eprint = {2206.08917}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + pages = {3066--3084}, + issn = {2155-5435, 2155-5435}, + doi = {10.1021/acscatal.2c05426}, + url = {http://arxiv.org/abs/2206.08917}, + urldate = {2023-08-22}, + abstract = {The development of machine learning models for electrocatalysts requires a broad set of training data to enable their use across a wide variety of materials. One class of materials that currently lacks sufficient training data is oxides, which are critical for the development of OER catalysts. To address this, we developed the OC22 dataset, consisting of 62,331 DFT relaxations (\textasciitilde 9,854,504 single point calculations) across a range of oxide materials, coverages, and adsorbates. We define generalized total energy tasks that enable property prediction beyond adsorption energies; we test baseline performance of several graph neural networks; and we provide pre-defined dataset splits to establish clear benchmarks for future efforts. In the most general task, GemNet-OC sees a \textasciitilde 36\% improvement in energy predictions when combining the chemically dissimilar OC20 and OC22 datasets via fine-tuning. Similarly, we achieved a \textasciitilde 19\% improvement in total energy predictions on OC20 and a \textasciitilde 9\% improvement in force predictions in OC22 when using joint training. We demonstrate the practical utility of a top performing model by capturing literature adsorption energies and important OER scaling relationships. We expect OC22 to provide an important benchmark for models seeking to incorporate intricate long-range electrostatic and magnetic interactions in oxide surfaces. 
Dataset and baseline models are open sourced, and a public leaderboard is available to encourage continued community developments on the total energy tasks and data.}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Tran et al_2023_The Open Catalyst 2022 (OC22) Dataset and Challenges for Oxide Electrocatalysts.pdf;/Users/wasmer/Zotero/storage/7252VZH8/2206.html} +} + +@article{tshitoyanUnsupervisedWordEmbeddings2019, + title = {Unsupervised Word Embeddings Capture Latent Knowledge from Materials Science Literature}, + author = {Tshitoyan, Vahe and Dagdelen, John and Weston, Leigh and Dunn, Alexander and Rong, Ziqin and Kononova, Olga and Persson, Kristin A. and Ceder, Gerbrand and Jain, Anubhav}, + date = {2019-07}, + journaltitle = {Nature}, + volume = {571}, + number = {7763}, + pages = {95--98}, + publisher = {{Nature Publishing Group}}, + issn = {1476-4687}, + doi = {10.1038/s41586-019-1335-8}, + url = {https://www.nature.com/articles/s41586-019-1335-8}, + urldate = {2023-07-12}, + abstract = {The overwhelming majority of scientific knowledge is published as text, which is difficult to analyse by either traditional statistical analysis or modern machine learning methods. By contrast, the main source of machine-interpretable data for the materials research community has come from structured property databases1,2, which encompass only a small fraction of the knowledge present in the research literature. Beyond property values, publications contain valuable knowledge regarding the connections and relationships between data items as interpreted by the authors. To improve the identification and use of this knowledge, several studies have focused on the retrieval of information from scientific literature using supervised natural language processing3–10, which requires large hand-labelled datasets for training. 
Here we show that materials science knowledge present in the published literature can be efficiently encoded as information-dense word embeddings11–13 (vector representations of words) without human labelling or supervision. Without any explicit insertion of chemical knowledge, these embeddings capture complex materials science concepts such as the underlying structure of the periodic table and structure–property relationships in materials. Furthermore, we demonstrate that an unsupervised method can recommend materials for functional applications several years before their discovery. This suggests that latent knowledge regarding future discoveries is to a large extent embedded in past publications. Our findings highlight the possibility of extracting knowledge and relationships from the massive body of scientific literature in a collective manner, and point towards a generalized approach to the mining of scientific literature.}, + issue = {7763}, + langid = {english}, + keywords = {/unread,AML,descriptors,embedding,literature analysis,Mat2Vec,materials,ML,nlp,unsupervised learning,with-code,Word2Vec}, + file = {/Users/wasmer/Zotero/storage/NEI3YJWG/Tshitoyan et al. 
- 2019 - Unsupervised word embeddings capture latent knowle.pdf} +} + @article{tsubakiQuantumDeepField2020, title = {Quantum {{Deep Field}}: {{Data-Driven Wave Function}}, {{Electron Density Generation}}, and {{Atomization Energy Prediction}} and {{Extrapolation}} with {{Machine Learning}}}, shorttitle = {Quantum {{Deep Field}}}, @@ -10712,6 +14384,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Tsubaki_Mizoguchi_2020_Quantum Deep Field.pdf;/Users/wasmer/Zotero/storage/U2B2LBDQ/PhysRevLett.125.html} } +@article{tylerArtificialIntelligenceMaterials2023, + title = {Artificial {{Intelligence}} in {{Materials Education}}: {{A Roundtable Discussion}}}, + shorttitle = {Artificial {{Intelligence}} in {{Materials Education}}}, + author = {Tyler, Kaitlin and Chen, Enze and Meredig, Bryce and Sparks, Taylor}, + date = {2023-07-01}, + journaltitle = {JOM}, + shortjournal = {JOM}, + volume = {75}, + number = {7}, + pages = {2083--2085}, + issn = {1543-1851}, + doi = {10.1007/s11837-023-05922-3}, + url = {https://doi.org/10.1007/s11837-023-05922-3}, + urldate = {2023-08-19}, + langid = {english}, + keywords = {Citrine Informatics,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Tyler et al_2023_Artificial Intelligence in Materials Education.pdf} +} + @article{uhrinWorkflowsAiiDAEngineering2021, title = {Workflows in {{AiiDA}}: {{Engineering}} a High-Throughput, Event-Based Engine for Robust and Modular Computational Workflows}, shorttitle = {Workflows in {{AiiDA}}}, @@ -10731,6 +14422,16 @@ file = {/Users/wasmer/Nextcloud/Zotero/Uhrin et al_2021_Workflows in AiiDA.pdf;/Users/wasmer/Zotero/storage/KDEGTQ46/S0927025620305772.html} } +@online{UNCONVENTIONALEXOTICMAGNETISM, + title = {{{UNCONVENTIONAL AND EXOTIC MAGNETISM IN CARBON-BASED STRUCTURES AND RELATED MATERIALS}}}, + doi = {10.1142/S0217979213300077}, + url = {https://www.worldscientific.com/doi/epdf/10.1142/S0217979213300077}, + urldate = {2023-09-01}, + langid = {english}, + keywords = {/unread,review,RKKY 
interaction}, + file = {/Users/wasmer/Nextcloud/Zotero/UNCONVENTIONAL AND EXOTIC MAGNETISM IN CARBON-BASED STRUCTURES AND RELATED.pdf;/Users/wasmer/Zotero/storage/4SS3FPGT/S0217979213300077.html} +} + @online{unitedstatesMaterialsGenomeInitiative, title = {About the {{Materials Genome Initiative}}}, author = {United States, National Science {and} Technology Council}, @@ -10797,6 +14498,27 @@ file = {/Users/wasmer/Nextcloud/Zotero/Vandermause et al_2022_Active learning of reactive Bayesian force fields applied to heterogeneous.pdf} } +@article{vanderoordHyperactiveLearningDatadriven2023, + title = {Hyperactive Learning for Data-Driven Interatomic Potentials}, + author = {family=Oord, given=Cas, prefix=van der, useprefix=true and Sachs, Matthias and Kovács, Dávid Péter and Ortner, Christoph and Csányi, Gábor}, + date = {2023-09-13}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--14}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01104-6}, + url = {https://www.nature.com/articles/s41524-023-01104-6}, + urldate = {2023-09-22}, + abstract = {Data-driven interatomic potentials have emerged as a powerful tool for approximating ab initio potential energy surfaces. The most time-consuming step in creating these interatomic potentials is typically the generation of a suitable training database. To aid this process hyperactive learning (HAL), an accelerated active learning scheme, is presented as a method for rapid automated training database assembly. HAL adds a biasing term to a physically motivated sampler (e.g. molecular dynamics) driving atomic structures towards uncertainty in turn generating unseen or valuable training configurations. 
The proposed HAL framework is used to develop atomic cluster expansion (ACE) interatomic potentials for the AlSi10 alloy and polyethylene glycol (PEG) polymer starting from roughly a dozen initial configurations. The HAL generated ACE potentials are shown to be able to determine macroscopic properties, such as melting temperature and density, with close to experimental accuracy.}, + issue = {1}, + langid = {english}, + keywords = {\_tablet,ACE,active learning,alloys,AML,Bayesian methods,Bayesian optimization,Bayesian regression,binary systems,database generation,HAL,HAL-MD,iterative learning,iterative learning scheme,library,MD,MD17,ML,MLP,molecules,uncertainty quantification,with-code}, + file = {/Users/wasmer/Zotero/storage/S3FJEDUC/van der Oord et al_2023_Hyperactive learning for data-driven interatomic potentials.pdf} +} + @online{vanderoordHyperactiveLearningHAL2022, title = {Hyperactive {{Learning}} ({{HAL}}) for {{Data-Driven Interatomic Potentials}}}, author = {family=Oord, given=Cas, prefix=van der, useprefix=true and Sachs, Matthias and Kovács, Dávid Péter and Ortner, Christoph and Csányi, Gábor}, @@ -10809,7 +14531,7 @@ urldate = {2023-02-05}, abstract = {Data-driven interatomic potentials have emerged as a powerful class of surrogate models for \{\textbackslash it ab initio\} potential energy surfaces that are able to reliably predict macroscopic properties with experimental accuracy. In generating accurate and transferable potentials the most time-consuming and arguably most important task is generating the training set, which still requires significant expert user input. To accelerate this process, this work presents \textbackslash text\{\textbackslash it hyperactive learning\} (HAL), a framework for formulating an accelerated sampling algorithm specifically for the task of training database generation. 
The key idea is to start from a physically motivated sampler (e.g., molecular dynamics) and add a biasing term that drives the system towards high uncertainty and thus to unseen training configurations. Building on this framework, general protocols for building training databases for alloys and polymers leveraging the HAL framework will be presented. For alloys, ACE potentials for AlSi10 are created by fitting to a minimal HAL-generated database containing 88 configurations (32 atoms each) with fast evaluation times of {$<$}100 microsecond/atom/cpu-core. These potentials are demonstrated to predict the melting temperature with excellent accuracy. For polymers, a HAL database is built using ACE, able to determine the density of a long polyethylene glycol (PEG) polymer formed of 200 monomer units with experimental accuracy by only fitting to small isolated PEG polymers with sizes ranging from 2 to 32.}, pubstate = {preprint}, - keywords = {ACE,active learning,Bayesian methods,Bayesian optimization,Bayesian regression,database generation,HAL,HAL-MD,iterative learning,iterative learning scheme,MD,MD17,uncertainty quantification}, + keywords = {ACE,active learning,alloys,AML,Bayesian methods,Bayesian optimization,Bayesian regression,binary systems,database generation,HAL,HAL-MD,iterative learning,iterative learning scheme,library,MD,MD17,ML,MLP,molecules,uncertainty quantification,with-code}, file = {/Users/wasmer/Zotero/storage/4S2GHGVG/van der Oord et al. - 2022 - Hyperactive Learning (HAL) for Data-Driven Interat.pdf;/Users/wasmer/Zotero/storage/YJBLUYLE/2210.html} } @@ -10902,6 +14624,40 @@ file = {/Users/wasmer/Nextcloud/Zotero/Vedmedenko et al_2020_The 2020 magnetism roadmap.pdf} } +@article{velickySingleSiteApproximationsElectronic1968, + title = {Single-{{Site Approximations}} in the {{Electronic Theory}} of {{Simple Binary Alloys}}}, + author = {Velický, B. and Kirkpatrick, S. 
and Ehrenreich, H.}, + date = {1968-11-15}, + journaltitle = {Physical Review}, + shortjournal = {Phys. Rev.}, + volume = {175}, + number = {3}, + pages = {747--766}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRev.175.747}, + url = {https://link.aps.org/doi/10.1103/PhysRev.175.747}, + urldate = {2023-09-19}, + abstract = {A single-band model Hamiltonian is used to describe the electronic structure of a three-dimensional disordered binary alloy. Several common theories based on the single-site approximation in a multiple-scattering description are compared with exact results for this Hamiltonian. The coherent-potential theory of Soven and others is shown to be the best of these. Within the appropriate limits, it exhibits dilute-alloy, virtual-crystal, and well separated impurity-band behavior. Hubbard and Onodera's and Toyozawa's simple model density of states is employed in numerical calculations for a wide variety of concentrations and scattering-potential strengths. Explicit results are exhibited for the total density of states, the partial density contributed by each component, and such k-dependent properties as the Bloch-wave spectral density and the distribution function. 
These illustrate the general conclusions as well as the limitations of the quasiparticle description.}, + file = {/Users/wasmer/Nextcloud/Zotero/Velický et al_1968_Single-Site Approximations in the Electronic Theory of Simple Binary Alloys.pdf;/Users/wasmer/Zotero/storage/JBY43F6G/PhysRev.175.html} +} + +@online{venugopalMatKGLargestKnowledge2022, + title = {{{MatKG}}: {{The Largest Knowledge Graph}} in {{Materials Science}} -- {{Entities}}, {{Relations}}, and {{Link Prediction}} through {{Graph Representation Learning}}}, + shorttitle = {{{MatKG}}}, + author = {Venugopal, Vineeth and Pai, Sumit and Olivetti, Elsa}, + date = {2022-10-31}, + eprint = {2210.17340}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2210.17340}, + url = {http://arxiv.org/abs/2210.17340}, + urldate = {2023-11-05}, + abstract = {This paper introduces MatKG, a novel graph database of key concepts in material science spanning the traditional material-structure-property-processing paradigm. MatKG is autonomously generated through transformer-based, large language models and generates pseudo ontological schema through statistical co-occurrence mapping. At present, MatKG contains over 2 million unique relationship triples derived from 80,000 entities. This allows the curated analysis, querying, and visualization of materials knowledge at unique resolution and scale. Further, Knowledge Graph Embedding models are used to learn embedding representations of nodes in the graph which are used for downstream tasks such as link prediction and entity disambiguation. 
MatKG allows the rapid dissemination and assimilation of data when used as a knowledge base, while enabling the discovery of new relations when trained as an embedding model.}, + pubstate = {preprint}, + keywords = {AML,dataset,graph embedding,knowledge graph,knowledge graph embedding,materials,ML,representation learning}, + file = {/Users/wasmer/Nextcloud/Zotero/Venugopal et al_2022_MatKG.pdf;/Users/wasmer/Zotero/storage/GBSDFV8K/2210.html} +} + @inproceedings{villarScalarsAreUniversal2021, title = {Scalars Are Universal: {{Equivariant}} Machine Learning, Structured like Classical Physics}, shorttitle = {Scalars Are Universal}, @@ -11004,6 +14760,22 @@ annotation = {OCLC: 633422775} } +@online{wangApproximatelyEquivariantNetworks2022, + title = {Approximately {{Equivariant Networks}} for {{Imperfectly Symmetric Dynamics}}}, + author = {Wang, Rui and Walters, Robin and Yu, Rose}, + date = {2022-06-16}, + eprint = {2201.11969}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2201.11969}, + url = {http://arxiv.org/abs/2201.11969}, + urldate = {2023-11-11}, + abstract = {Incorporating symmetry as an inductive bias into neural network architecture has led to improvements in generalization, data efficiency, and physical consistency in dynamics modeling. Methods such as CNNs or equivariant neural networks use weight tying to enforce symmetries such as shift invariance or rotational equivariance. However, despite the fact that physical laws obey many symmetries, real-world dynamical data rarely conforms to strict mathematical symmetry either due to noisy or incomplete data or to symmetry breaking features in the underlying dynamical system. We explore approximately equivariant networks which are biased towards preserving symmetry but are not strictly constrained to do so. 
By relaxing equivariance constraints, we find that our models can outperform both baselines with no symmetry bias and baselines with overly strict symmetry in both simulated turbulence domains and real-world multi-stream jet flow.}, + pubstate = {preprint}, + keywords = {alternative approaches,approximat,approximative equivariance,CNN,data equivariance,Deep learning,equivariant,General ML,GNN,inductive bias,invariance,ML,model equivariance,symmetry}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2022_Approximately Equivariant Networks for Imperfectly Symmetric Dynamics.pdf;/Users/wasmer/Zotero/storage/L7W3IW67/2201.html} +} + @online{wangGraphNetsPartial2019, title = {Graph {{Nets}} for {{Partial Charge Prediction}}}, author = {Wang, Yuanqing and Fass, Josh and Stern, Chaya D. and Luo, Kun and Chodera, John}, @@ -11036,6 +14808,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2022_Intrinsic Magnetic Topological Materials.pdf;/Users/wasmer/Zotero/storage/MLELX7M9/2212.html} } +@article{wangKnowledgeGraphEmbedding2017, + title = {Knowledge {{Graph Embedding}}: {{A Survey}} of {{Approaches}} and {{Applications}}}, + shorttitle = {Knowledge {{Graph Embedding}}}, + author = {Wang, Quan and Mao, Zhendong and Wang, Bin and Guo, Li}, + date = {2017-12}, + journaltitle = {IEEE Transactions on Knowledge and Data Engineering}, + volume = {29}, + number = {12}, + pages = {2724--2743}, + issn = {1558-2191}, + doi = {10.1109/TKDE.2017.2754499}, + url = {https://ieeexplore.ieee.org/document/8047276}, + urldate = {2023-11-05}, + abstract = {Knowledge graph (KG) embedding is to embed components of a KG including entities and relations into continuous vector spaces, so as to simplify the manipulation while preserving the inherent structure of the KG. It can benefit a variety of downstream tasks such as KG completion and relation extraction, and hence has quickly gained massive attention. 
In this article, we provide a systematic review of existing techniques, including not only the state-of-the-arts but also those with latest trends. Particularly, we make the review based on the type of information used in the embedding task. Techniques that conduct embedding using only facts observed in the KG are first introduced. We describe the overall framework, specific model design, typical training procedures, as well as pros and cons of such techniques. After that, we discuss techniques that further incorporate additional information besides facts. We focus specifically on the use of entity types, relation paths, textual descriptions, and logical rules. Finally, we briefly introduce how KG embedding can be applied to and benefit a wide variety of downstream tasks such as KG completion, relation extraction, question answering, and so forth.}, + eventtitle = {{{IEEE Transactions}} on {{Knowledge}} and {{Data Engineering}}}, + keywords = {general ML,knowledge graph,knowledge graph embedding,ML}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2017_Knowledge Graph Embedding.pdf;/Users/wasmer/Zotero/storage/2AR3R726/8047276.html} +} + @article{wangLargeScaleDataset2022, title = {Large Scale Dataset of Real Space Electronic Charge Density of Cubic Inorganic Materials from Density Functional Theory ({{DFT}}) Calculations}, author = {Wang, Fancy Qian and Choudhary, Kamal and Liu, Yu and Hu, Jianjun and Hu, Ming}, @@ -11077,6 +14868,71 @@ file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2020_Machine Learning for Materials Scientists.pdf;/Users/wasmer/Zotero/storage/PY7PFU35/acs.chemmater.html} } +@article{wangMuSTHighPerformance2021, + title = {{{MuST}}: {{A}} High Performance Ab Initio Framework for the Study of Disordered Structures}, + shorttitle = {{{MuST}}}, + author = {Wang, Yang and Eisenbach, Markus and Liu, Xianglin and Karabin, Mariia and Ghosh, Swarnava and Terletska, Hanna and Mondal, Wasim and Tam, Ka-Ming and Zhang, Yi and Chioncel, Liviu and 
Raghuraman, Vishnu and Widom, Michael and Tian, Fuyang}, + date = {2021-01-01}, + volume = {2021}, + pages = {F22.006}, + url = {https://ui.adsabs.harvard.edu/abs/2021APS..MARF22006W}, + urldate = {2023-09-19}, + abstract = {The effect of disorder in materials is of great fundamental and technological interest. In this presentation, I will introduce MuST, an open source package designed for enabling first principles investigation of disordered materials. MuST is developed based on full-potential multiple scattering theory with Green function approach, and is built upon decades of development of research codes that include KKR-CPA, a highly efficient ab initio method for the study of random alloys, and Locally Self-consistent Multiple Scattering (LSMS) method, a linear scaling ab initio code capable of treating extremely large disordered systems from the first principles using the largest parallel supercomputers available. Strong disorder and localization effects can also be studied in real system within the LSMS formalism with cluster embedding in an effective medium with the Typical Medium Dynamical Cluster Approximation (TMDCA), which enables a scalable approach for first principles studies of quantum materials. I will show the latest development of the MuST project, and discuss its potential applications and computational challenges. This work is jointly supported by the NSF OCA and DMR under Award Number 1931525/1931367/1931445, and is supported in parts by the Office of Science of DOE.}, + eventtitle = {{{APS March Meeting Abstracts}}}, + keywords = {/unread,todo-tagging}, + annotation = {ADS Bibcode: 2021APS..MARF22006W} +} + +@article{wangOrderNMultipleScattering1995, + title = {Order-{{N Multiple Scattering Approach}} to {{Electronic Structure Calculations}}}, + author = {Wang, Yang and Stocks, G. M. and Shelton, W. A. and Nicholson, D. M. C. and Szotek, Z. and Temmerman, W. 
M.},
+  date = {1995-10-09},
+  journaltitle = {Physical Review Letters},
+  shortjournal = {Phys. Rev. Lett.},
+  volume = {75},
+  number = {15},
+  pages = {2867--2870},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevLett.75.2867},
+  url = {https://link.aps.org/doi/10.1103/PhysRevLett.75.2867},
+  urldate = {2023-09-19},
+  abstract = {A new approach to the calculation of the electronic structure of large systems within the local density approximation is outlined. The electronic structure problem is formulated using real space multiple scattering theory. Employing a compute-node ↔ atom equivalence, the method has been implemented on a massively parallel processing supercomputer. The method is naturally highly parallel and ideal order-N scaling is obtained. The convergence of the method is demonstrated by comparison with the result of conventional electronic structure calculation for elemental metals and through calculation of the ordering energy of β brass.},
+  keywords = {todo-tagging},
+  file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_1995_Order-N Multiple Scattering Approach to Electronic Structure Calculations.pdf}
+}
+
+@incollection{wangRealSpaceMultipleScatteringTheory2017,
+  title = {Real-{{Space Multiple-Scattering Theory}} and {{Its Applications}} at {{Exascale}}},
+  booktitle = {Exascale {{Scientific Applications}}},
+  author = {Wang, Yang and Eisenbach, Markus},
+  date = {2017},
+  publisher = {{Chapman and Hall/CRC}},
+  abstract = {This chapter presents a linear scaling ab initio method based on multiple scattering theories that shows clear advantage over other ab initio methods. It demonstrates its petascale computing capability in the ab initio calculation of magnetic and crystal structure phase transition temperatures. The chapter discusses its potential applications at exascale. 
The unit cell consists of the constituent atoms in a predetermined proportion and in a real-space distribution to mimic the atomic composition and spatial arrangement in the actual material. The construction of the local interaction zone itself requires a loop over all atom sites in our naive implementation. As a significant amount of computational effort of locally self-consistent multiple-scattering is spent in inverting the multiple-scattering matrix, a non-Hermitian complex matrix, this formed the main focus for porting to graphics processing units and the experience is guiding the portability to future platforms.},
+  isbn = {978-1-315-27740-0},
+  pagetotal = {12},
+  keywords = {/unread,todo-tagging}
+}
+
+@article{wangScientificDiscoveryAge2023,
+  title = {Scientific discovery in the age of artificial intelligence},
+  author = {Wang, Hanchen and Fu, Tianfan and Du, Yuanqi and Gao, Wenhao and Huang, Kexin and Liu, Ziming and Chandak, Payal and Liu, Shengchao and Van Katwyk, Peter and Deac, Andreea and Anandkumar, Anima and Bergen, Karianne and Gomes, Carla P. and Ho, Shirley and Kohli, Pushmeet and Lasenby, Joan and Leskovec, Jure and Liu, Tie-Yan and Manrai, Arjun and Marks, Debora and Ramsundar, Bharath and Song, Le and Sun, Jimeng and Tang, Jian and Veličković, Petar and Welling, Max and Zhang, Linfeng and Coley, Connor W. 
and Bengio, Yoshua and Zitnik, Marinka}, + date = {2023-08}, + journaltitle = {Nature}, + volume = {620}, + number = {7972}, + pages = {47--60}, + publisher = {{Nature Publishing Group}}, + issn = {1476-4687}, + doi = {10.1038/s41586-023-06221-2}, + url = {https://www.nature.com/articles/s41586-023-06221-2}, + urldate = {2023-10-07}, + abstract = {Artificial intelligence (AI) is being increasingly integrated into scientific discovery to augment and accelerate research, helping scientists to generate hypotheses, design experiments, collect and interpret large datasets, and gain insights that might not have been possible using traditional scientific methods alone. Here we examine breakthroughs over the past decade that include self-supervised learning, which allows models to be trained on vast amounts of unlabelled data, and geometric deep learning, which leverages knowledge about the structure of scientific data to enhance model accuracy and efficiency. Generative AI methods can create designs, such as small-molecule drugs and proteins, by analysing diverse data modalities, including images and sequences. We discuss how these methods can help scientists throughout the scientific process and the central issues that remain despite such advances. Both developers and users of AI tools need a better understanding of when such approaches need improvement, and challenges posed by poor data quality and stewardship remain. 
These issues cut across scientific disciplines and require developing foundational algorithmic approaches that can contribute to scientific understanding or acquire it autonomously, making them critical areas of focus for AI innovation.}, + issue = {7972}, + langid = {english}, + keywords = {AI4Science,equivariant,General ML,geometric deep learning,LLM,ML,MPNN,neural operator,review,review-of-AI4science,roadmap,symmetry,transformer}, + file = {/Users/wasmer/Nextcloud/Zotero/Wang et al_2023_Scientific discovery in the age of artificial intelligence.pdf} +} + @online{wangSymmetrybasedComputationalSearch2022, title = {Symmetry-Based Computational Search for Novel Binary and Ternary {{2D}} Materials}, author = {Wang, Hai-Chen and Schmidt, Jonathan and Marques, Miguel A. L. and Wirtz, Ludger and Romero, Aldo H.}, @@ -11248,6 +15104,23 @@ file = {/home/johannes/Nextcloud/Zotero/false;/Users/wasmer/Nextcloud/Zotero/Wasmer_2021_Development of a surrogate machine learning model for the acceleration of.pdf;/Users/wasmer/Zotero/storage/AC483X2N/master-thesis.html} } +@online{weiGraphLearningIts2023, + title = {Graph {{Learning}} and {{Its Applications}}: {{A Holistic Survey}}}, + shorttitle = {Graph {{Learning}} and {{Its Applications}}}, + author = {Wei, Shaopeng and Zhao, Yu and Chen, Xingyan and Li, Qing and Zhuang, Fuzhen and Liu, Ji and Kou, Gang}, + date = {2023-06-03}, + eprint = {2212.08966}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2212.08966}, + url = {http://arxiv.org/abs/2212.08966}, + urldate = {2023-11-14}, + abstract = {Graph learning is a prevalent domain that endeavors to learn the intricate relationships among nodes and the topological structure of graphs. Over the years, graph learning has transcended from graph theory to graph data mining. With the advent of representation learning, it has attained remarkable performance in diverse scenarios. 
Owing to its extensive application prospects, graph learning attracts copious attention. While some researchers have accomplished impressive surveys on graph learning, they failed to connect related objectives, methods, and applications in a more coherent way. As a result, they did not encompass current ample scenarios and challenging problems due to the rapid expansion of graph learning. Particularly, large language models have recently had a disruptive effect on human life, but they also show relative weakness in structured scenarios. The question of how to make these models more powerful with graph learning remains open. Different from previous surveys on graph learning, we provide a holistic review that analyzes current works from the perspective of graph structure, and discusses the latest applications, trends, and challenges in graph learning. Specifically, we commence by proposing a taxonomy and then summarize the methods employed in graph learning. We then provide a detailed elucidation of mainstream applications. 
Finally, we propose future directions.}, + pubstate = {preprint}, + keywords = {Computer Science - Artificial Intelligence}, + file = {/Users/wasmer/Nextcloud/Zotero/Wei et al_2023_Graph Learning and Its Applications.pdf;/Users/wasmer/Zotero/storage/WLJFND3J/2212.html} +} + @online{weilerGeneralEquivariantSteerable2021, title = {General {{E}}(2)-{{Equivariant Steerable CNNs}}}, author = {Weiler, Maurice and Cesa, Gabriele}, @@ -11283,6 +15156,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Weinert_1981_Solution of Poisson’s equation.pdf} } +@article{weinertSolutionPoissonEquation1981a, + title = {Solution of {{Poisson}}’s Equation: {{Beyond Ewald}}-type Methods}, + shorttitle = {Solution of {{Poisson}}’s Equation}, + author = {Weinert, M.}, + date = {1981-11-01}, + journaltitle = {Journal of Mathematical Physics}, + shortjournal = {Journal of Mathematical Physics}, + volume = {22}, + number = {11}, + pages = {2433--2439}, + issn = {0022-2488}, + doi = {10.1063/1.524800}, + url = {https://doi.org/10.1063/1.524800}, + urldate = {2023-09-24}, + abstract = {A general method for solving Poisson’s equation without shape approximation for an arbitrary periodic charge distribution is presented. The method is based on the concept of multipole potentials and the boundary value problem for a sphere. In contrast to the usual Ewald-type methods, this method has only absolutely and uniformly convergent reciprocal space sums, and treats all components of the charge density equivalently. Applications to band structure calculations and lattice summations are also discussed.}, + file = {/Users/wasmer/Zotero/storage/K6VZIJKD/Weinert - 1981 - Solution of Poisson’s equation Beyond Ewaldâ€type .pdf;/Users/wasmer/Zotero/storage/X9ME72CA/Solution-of-Poisson-s-equation-Beyond-Ewald-type.html} +} + +@online{wellawattePerspectiveExplanationsMolecular2022, + title = {A {{Perspective}} on {{Explanations}} of {{Molecular Prediction Models}}}, + author = {Wellawatte, Geemi P. and Gandhi, Heta A.
and Seshadri, Aditi and White, Andrew D.}, @@ -11298,6 +15189,38 @@ file = {/Users/wasmer/Nextcloud/Zotero/Wellawatte et al_2022_A Perspective on Explanations of Molecular Prediction Models.pdf} } +@article{wellawattePerspectiveExplanationsMolecular2023, + title = {A {{Perspective}} on {{Explanations}} of {{Molecular Prediction Models}}}, + author = {Wellawatte, Geemi P. and Gandhi, Heta A. and Seshadri, Aditi and White, Andrew D.}, + date = {2023-04-25}, + journaltitle = {Journal of Chemical Theory and Computation}, + shortjournal = {J. Chem. Theory Comput.}, + volume = {19}, + number = {8}, + pages = {2149--2160}, + publisher = {{American Chemical Society}}, + issn = {1549-9618}, + doi = {10.1021/acs.jctc.2c01235}, + url = {https://doi.org/10.1021/acs.jctc.2c01235}, + urldate = {2023-10-08}, + abstract = {Chemists can be skeptical in using deep learning (DL) in decision making, due to the lack of interpretability in “black-box” models. Explainable artificial intelligence (XAI) is a branch of artificial intelligence (AI) which addresses this drawback by providing tools to interpret DL models and their predictions. We review the principles of XAI in the domain of chemistry and emerging methods for creating and evaluating explanations. Then, we focus on methods developed by our group and their applications in predicting solubility, blood–brain barrier permeability, and the scent of molecules. We show that XAI methods like chemical counterfactuals and descriptor explanations can explain DL predictions while giving insight into structure–property relationships.
Finally, we discuss how a two-step process of developing a black-box model and explaining predictions can uncover structure–property relationships.}, + keywords = {/unread,counterfactual explanation,Deep learning,GNN,library,LLM,molecules,nlp,with-code,XAI}, + file = {/Users/wasmer/Nextcloud/Zotero/Wellawatte et al_2023_A Perspective on Explanations of Molecular Prediction Models.pdf;/Users/wasmer/Zotero/storage/ILWW84AT/acs.jctc.html} +} + +@unpublished{wellingConvergingAdvancesAccelerate2022, + title = {Converging {{Advances}} to {{Accelerate Molecular Simulation}}}, + author = {Welling, Max}, + date = {2022-12-20}, + url = {https://portal.valencelabs.com/events/post/converging-advances-to-accelerate-molecular-simulation-EvHb5ThyGLXPJBp}, + urldate = {2023-11-11}, + abstract = {Everything tangible in the universe is made of molecules. Yet our ability to digitally simulate even small molecules is rather poor due to the complexities of quantum mechanics. However, there are a number of advances that are converging to dramatically improve our ability to understand the behavior of molecules. Firstly, deep learning and in particular equivariant graph neural networks are now an important tool to model molecules. They are for instance the core technology in Deepmind’s AlphaFold to predict the 3d shape of a molecule from its amino acid sequence. Second, despite claims to the contrary, Moore’s law is still alive, and in particular the design of ASIC architectures for special purpose computation will continue to accelerate our ability to break new computational barriers. And finally there is the rapid advance of quantum computation. While fault tolerant quantum computation might still be a decade away, it is expected that it’s first useful application, to simulate (quantum) nature itself, may be much closer. 
In this talk I will introduce some technology around equivariant graph neural networks and give my perspective on why I am excited about the opportunities that will come from new breakthroughs in molecular simulation. It may facilitate the search for new sustainable technologies to capture carbon from the air, develop biodegradable plastics, reduce the cost of electrolysis through better catalysts, develop cleaner and cheaper fertilizers, design new drugs to treat disease and so on. Our understanding of matter will be key to unlocking these new materials for the benefit of humanity.}, + langid = {american}, + venue = {{Valence Labs - M2D2 reading group}}, + keywords = {/unread,AI4Science,AML,biomolecules,GNN,Microsoft Research,ML,molecules,neural operator,PDE,review,review-of-AI4science}, + file = {/Users/wasmer/Zotero/storage/HF88TM6X/converging-advances-to-accelerate-molecular-simulation-EvHb5ThyGLXPJBp.html} +} + @article{westermayrMachineLearningElectronically2021, title = {Machine {{Learning}} for {{Electronically Excited States}} of {{Molecules}}}, author = {Westermayr, Julia and Marquetand, Philipp}, @@ -11388,6 +15311,18 @@ file = {/Users/wasmer/Nextcloud/Zotero/White_2023_The future of chemistry is language.pdf} } +@unpublished{whiteLanguageFutureChemistry2023, + title = {Language Is the {{Future}} of {{Chemistry}}}, + author = {White, Andrew D.}, + date = {2023-10-05}, + url = {https://www.youtube.com/watch?v=hMow_BFwIqM}, + urldate = {2023-10-07}, + abstract = {Andrew White is a Member of Technical Staff, Future House The Applied Machine Learning Days channel features talks and performances from the Applied Machine Learning Days held at the EPFL. AMLD is one of the largest machine learning \& AI events in Europe, focused specifically on the applications of machine learning and AI, making it particularly interesting to industry and academia. 
Follow AMLD: on Twitter: https://www.twitter.com/appliedmldays on LinkedIn: https://www.linkedin.com/company/appl... on Mastodon : https://mastodon.social/@amld AMLD Website: https://www.appliedmldays.org}, + eventtitle = {Applied {{Machine Learning Days}}}, + venue = {{EPFL, Lausanne, Switzerland}}, + keywords = {/unread,chemistry,equivariant,foundation models,GNN,LLM,pretrained models} +} + @article{wiederCompactReviewMolecular2020, title = {A compact review of molecular property prediction with graph neural networks}, author = {Wieder, Oliver and Kohlbacher, Stefan and Kuenemann, Mélaine and Garon, Arthur and Ducrot, Pierre and Seidel, Thomas and Langer, Thierry}, @@ -11497,6 +15432,42 @@ file = {/Users/wasmer/Nextcloud/Zotero/Winter et al_2022_Unsupervised Learning of Group Invariant and Equivariant Representations.pdf;/Users/wasmer/Zotero/storage/5PYE8XM2/2202.html} } +@online{wittACEpotentialsJlJulia2023, + title = {{{ACEpotentials}}.Jl: {{A Julia Implementation}} of the {{Atomic Cluster Expansion}}}, + shorttitle = {{{ACEpotentials}}.Jl}, + author = {Witt, William C. and family=Oord, given=Cas, prefix=van der, useprefix=true and Gelžinytė, Elena and Järvinen, Teemu and Ross, Andres and Darby, James P. and Ho, Cheuk Hin and Baldwin, William J. and Sachs, Matthias and Kermode, James and Bernstein, Noam and Csányi, Gábor and Ortner, Christoph}, + date = {2023-09-07}, + eprint = {2309.03161}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2309.03161}, + url = {http://arxiv.org/abs/2309.03161}, + urldate = {2023-09-22}, + abstract = {We introduce ACEpotentials.jl, a Julia-language software package that constructs interatomic potentials from quantum mechanical reference data using the Atomic Cluster Expansion (Drautz, 2019).
As the latter provides a complete description of atomic environments, including invariance to overall translation and rotation as well as permutation of like atoms, the resulting potentials are systematically improvable and data efficient. Furthermore, the descriptor's expressiveness enables use of a linear model, facilitating rapid evaluation and straightforward application of Bayesian techniques for active learning. We summarize the capabilities of ACEpotentials.jl and demonstrate its strengths (simplicity, interpretability, robustness, performance) on a selection of prototypical atomistic modelling workflows.}, + pubstate = {preprint}, + keywords = {ACE,active learning,AML,descriptors,invariance,Julia,library,linear regression,ML,sparsification,todo-tagging,tutorial,with-code}, + file = {/Users/wasmer/Zotero/storage/P4J69KC5/Witt et al. - 2023 - ACEpotentials.jl A Julia Implementation of the At.pdf;/Users/wasmer/Zotero/storage/SCPAUKAV/2309.html} +} + +@article{wittOrbitalfreeDensityFunctional2018, + title = {Orbital-Free Density Functional Theory for Materials Research}, + author = {Witt, William C. and family=Rio, given=Beatriz G., prefix=del, useprefix=false and Dieterich, Johannes M. and Carter, Emily A.}, + date = {2018-04}, + journaltitle = {Journal of Materials Research}, + volume = {33}, + number = {7}, + pages = {777--795}, + publisher = {{Cambridge University Press}}, + issn = {0884-2914, 2044-5326}, + doi = {10.1557/jmr.2017.462}, + url = {https://www.cambridge.org/core/journals/journal-of-materials-research/article/orbitalfree-density-functional-theory-for-materials-research/49FCB1F2856649AA431E803AE340674E}, + urldate = {2023-09-22}, + abstract = {, Orbital-free density functional theory (OFDFT) is both grounded in quantum physics and suitable for direct simulation of thousands of atoms. 
This article describes the application of OFDFT for materials research over roughly the past two decades, highlighting computational studies that would have been impractical (or impossible) to perform with other techniques. In particular, we review the growing body of simulations of solids and liquids that have been conducted with planewave-pseudopotential (or related) techniques. We also provide an updated account of the fundamentals of OFDFT, emphasizing aspects—such as nonlocal density functionals for computing the kinetic energy of noninteracting electrons—that enabled much of the application work. The article concludes with a discussion of the OFDFT frontier, which contains brief descriptions of other topics at the forefront of OFDFT research.}, + langid = {english}, + keywords = {/unread,DFT,DFT theory,OF-DFT,review,review-of-OF-DFT}, + file = {/Users/wasmer/Zotero/storage/XIWM32BX/Witt et al. - 2018 - Orbital-free density functional theory for materia.pdf} +} + @online{woodgateInterplayMagnetismShortrange2023, title = {Interplay between Magnetism and Short-Range Order in {{Ni-based}} High-Entropy Alloys: {{CrCoNi}}, {{CrFeCoNi}}, and {{CrMnFeCoNi}}}, shorttitle = {Interplay between Magnetism and Short-Range Order in {{Ni-based}} High-Entropy Alloys}, @@ -11541,6 +15512,44 @@ file = {/Users/wasmer/Zotero/storage/KYZY7YXB/7891361.html} } +@online{wuMoleculeNetBenchmarkMolecular2018, + title = {{{MoleculeNet}}: {{A Benchmark}} for {{Molecular Machine Learning}}}, + shorttitle = {{{MoleculeNet}}}, + author = {Wu, Zhenqin and Ramsundar, Bharath and Feinberg, Evan N. and Gomes, Joseph and Geniesse, Caleb and Pappu, Aneesh S. and Leswing, Karl and Pande, Vijay}, + date = {2018-10-25}, + eprint = {1703.00564}, + eprinttype = {arxiv}, + eprintclass = {physics, stat}, + doi = {10.48550/arXiv.1703.00564}, + url = {http://arxiv.org/abs/1703.00564}, + urldate = {2023-09-25}, + abstract = {Molecular machine learning has been maturing rapidly over the last few years. 
Improved methods and the presence of larger datasets have enabled machine learning algorithms to make increasingly accurate predictions about molecular properties. However, algorithmic progress has been limited due to the lack of a standard benchmark to compare the efficacy of proposed methods; most new algorithms are benchmarked on different datasets making it challenging to gauge the quality of proposed methods. This work introduces MoleculeNet, a large scale benchmark for molecular machine learning. MoleculeNet curates multiple public datasets, establishes metrics for evaluation, and offers high quality open-source implementations of multiple previously proposed molecular featurization and learning algorithms (released as part of the DeepChem open source library). MoleculeNet benchmarks demonstrate that learnable representations are powerful tools for molecular machine learning and broadly offer the best performance. However, this result comes with caveats. Learnable representations still struggle to deal with complex tasks under data scarcity and highly imbalanced classification. For quantum mechanical and biophysical datasets, the use of physics-aware featurizations can be more important than choice of particular learning algorithm.}, + pubstate = {preprint}, + keywords = {/unread,AML,benchmark dataset,benchmarking,Database,graph ML,ML,molecules}, + file = {/Users/wasmer/Zotero/storage/HQS5MMUL/Wu et al. - 2018 - MoleculeNet A Benchmark for Molecular Machine Lea.pdf;/Users/wasmer/Zotero/storage/RAREFZ8Y/1703.html} +} + +@article{wuMoleculeNetBenchmarkMolecular2018a, + title = {{{MoleculeNet}}: A Benchmark for Molecular Machine Learning}, + shorttitle = {{{MoleculeNet}}}, + author = {Wu, Zhenqin and Ramsundar, Bharath and Feinberg, Evan N. and Gomes, Joseph and Geniesse, Caleb and Pappu, Aneesh S. and Leswing, Karl and Pande, Vijay}, + date = {2018-01-03}, + journaltitle = {Chemical Science}, + shortjournal = {Chem. 
Sci.}, + volume = {9}, + number = {2}, + pages = {513--530}, + publisher = {{The Royal Society of Chemistry}}, + issn = {2041-6539}, + doi = {10.1039/C7SC02664A}, + url = {https://pubs.rsc.org/en/content/articlelanding/2018/sc/c7sc02664a}, + urldate = {2023-09-25}, + abstract = {Molecular machine learning has been maturing rapidly over the last few years. Improved methods and the presence of larger datasets have enabled machine learning algorithms to make increasingly accurate predictions about molecular properties. However, algorithmic progress has been limited due to the lack of a standard benchmark to compare the efficacy of proposed methods; most new algorithms are benchmarked on different datasets making it challenging to gauge the quality of proposed methods. This work introduces MoleculeNet, a large scale benchmark for molecular machine learning. MoleculeNet curates multiple public datasets, establishes metrics for evaluation, and offers high quality open-source implementations of multiple previously proposed molecular featurization and learning algorithms (released as part of the DeepChem open source library). MoleculeNet benchmarks demonstrate that learnable representations are powerful tools for molecular machine learning and broadly offer the best performance. However, this result comes with caveats. Learnable representations still struggle to deal with complex tasks under data scarcity and highly imbalanced classification. For quantum mechanical and biophysical datasets, the use of physics-aware featurizations can be more important than choice of particular learning algorithm.}, + langid = {english}, + keywords = {/unread,AML,benchmark dataset,benchmarking,Database,graph ML,ML,molecules}, + file = {/Users/wasmer/Zotero/storage/EULDRFQB/Wu et al. - 2018 - MoleculeNet a benchmark for molecular machine lea.pdf;/Users/wasmer/Zotero/storage/J3CZHR7A/Wu et al. 
- 2018 - MoleculeNet a benchmark for molecular machine lea.pdf} +} + @article{wurgerExploringStructurepropertyRelationships2021, title = {Exploring Structure-Property Relationships in Magnesium Dissolution Modulators}, author = {Würger, Tim and Mei, Di and Vaghefinazari, Bahram and Winkler, David A. and Lamaka, Sviatlana V. and Zheludkevich, Mikhail L. and Meißner, Robert H. and Feiler, Christian}, @@ -11563,6 +15572,25 @@ file = {/Users/wasmer/Nextcloud/Zotero/Würger et al_2021_Exploring structure-property relationships in magnesium dissolution modulators.pdf;/Users/wasmer/Zotero/storage/NM6RVQRY/s41529-020-00148-z.html} } +@article{xiaGraphLearningSurvey2021, + title = {Graph {{Learning}}: {{A Survey}}}, + shorttitle = {Graph {{Learning}}}, + author = {Xia, Feng and Sun, Ke and Yu, Shuo and Aziz, Abdul and Wan, Liangtian and Pan, Shirui and Liu, Huan}, + date = {2021-04}, + journaltitle = {IEEE Transactions on Artificial Intelligence}, + shortjournal = {IEEE Trans. Artif. Intell.}, + volume = {2}, + number = {2}, + pages = {109--127}, + issn = {2691-4581}, + doi = {10.1109/TAI.2021.3076021}, + url = {https://ieeexplore.ieee.org/document/9416834/}, + urldate = {2023-11-14}, + abstract = {Graphs are widely used as a popular representation of the network structure of connected data. Graph data can be found in a broad spectrum of application domains such as social systems, ecosystems, biological networks, knowledge graphs, and information systems. With the continuous penetration of artificial intelligence technologies, graph learning (i.e., machine learning on graphs) is gaining attention from both researchers and practitioners. Graph learning proves effective for many tasks, such as classification, link prediction, and matching. Generally, graph learning methods extract relevant features of graphs by taking advantage of machine learning algorithms. In this survey, we present a comprehensive overview on the state-of-the-art of graph learning. 
Special attention is paid to four categories of existing graph learning methods, including graph signal processing, matrix factorization, random walk, and deep learning. Major models and algorithms under these categories are reviewed, respectively. We examine graph learning applications in areas such as text, images, science, knowledge graphs, and combinatorial optimization. In addition, we discuss several promising research directions in this field.}, + keywords = {/unread,General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, + file = {/Users/wasmer/Nextcloud/Zotero/Xia et al_2021_Graph Learning.pdf} +} + @article{xieCrystalGraphConvolutional2018, title = {Crystal {{Graph Convolutional Neural Networks}} for an {{Accurate}} and {{Interpretable Prediction}} of {{Material Properties}}}, author = {Xie, Tian and Grossman, Jeffrey C.}, @@ -11651,7 +15679,7 @@ url = {http://arxiv.org/abs/2110.00624}, urldate = {2022-05-09}, abstract = {All-atom dynamics simulations are an indispensable quantitative tool in physics, chemistry, and materials science, but large systems and long simulation times remain challenging due to the trade-off between computational efficiency and predictive accuracy. To address this challenge, we combine effective two- and three-body potentials in a cubic B-spline basis with regularized linear regression to obtain machine-learning potentials that are physically interpretable, sufficiently accurate for applications, as fast as the fastest traditional empirical potentials, and two to four orders of magnitude faster than state-of-the-art machine-learning potentials. For data from empirical potentials, we demonstrate exact retrieval of the potential. For data from density functional theory, the predicted energies, forces, and derived properties, including phonon spectra, elastic constants, and melting points, closely match those of the reference method. 
The introduced potentials might contribute towards accurate all-atom dynamics simulations of large atomistic systems over long time scales.}, - keywords = {descriptors,UFP}, + keywords = {/unread,AML,B-splines,benchmarking,body-order,descriptors,GAP,library,linear regression,ML,MLP,MLP comparison,original publication,qSNAP,SNAP,UF3,UFP,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Xie et al_2021_Ultra-fast interpretable machine-learning potentials.pdf;/Users/wasmer/Zotero/storage/8585X9YA/2110.html} } @@ -11667,10 +15695,31 @@ urldate = {2023-05-06}, abstract = {All-atom dynamics simulations are an indispensable quantitative tool in physics, chemistry, and materials science, but large systems and long simulation times remain challenging due to the trade-off between computational efficiency and predictive accuracy. To address this challenge, we combine effective two- and three-body potentials in a cubic B-spline basis with regularized linear regression to obtain machine-learning potentials that are physically interpretable, sufficiently accurate for applications, as fast as the fastest traditional empirical potentials, and two to four orders of magnitude faster than state-of-the-art machine-learning potentials. For data from empirical potentials, we demonstrate exact retrieval of the potential. For data from density functional theory, the predicted energies, forces, and derived properties, including phonon spectra, elastic constants, and melting points, closely match those of the reference method. 
The introduced potentials might contribute towards accurate all-atom dynamics simulations of large atomistic systems over long time scales.}, pubstate = {preprint}, - keywords = {/unread,AML,B-splines,body-order,descriptors,GAP,library,linear regression,ML,MLP,MLP comparison,original publication,qSNAP,SNAP,UF3}, + keywords = {/unread,AML,B-splines,benchmarking,body-order,descriptors,GAP,library,linear regression,ML,MLP,MLP comparison,original publication,qSNAP,SNAP,UF3,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Zotero/storage/B9DGEUPF/2110.html} } +@article{xieUltrafastInterpretableMachinelearning2023, + title = {Ultra-Fast Interpretable Machine-Learning Potentials}, + author = {Xie, Stephen R. and Rupp, Matthias and Hennig, Richard G.}, + date = {2023-09-02}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--9}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01092-7}, + url = {https://www.nature.com/articles/s41524-023-01092-7}, + urldate = {2023-10-01}, + abstract = {All-atom dynamics simulations are an indispensable quantitative tool in physics, chemistry, and materials science, but large systems and long simulation times remain challenging due to the trade-off between computational efficiency and predictive accuracy. To address this challenge, we combine effective two- and three-body potentials in a cubic B-spline basis with regularized linear regression to obtain machine-learning potentials that are physically interpretable, sufficiently accurate for applications, as fast as the fastest traditional empirical potentials, and two to four orders of magnitude faster than state-of-the-art machine-learning potentials. For data from empirical potentials, we demonstrate the exact retrieval of the potential. 
For data from density functional theory, the predicted energies, forces, and derived properties, including phonon spectra, elastic constants, and melting points, closely match those of the reference method. The introduced potentials might contribute towards accurate all-atom dynamics simulations of large atomistic systems over long-time scales.}, + issue = {1}, + langid = {english}, + keywords = {AML,B-splines,benchmarking,body-order,descriptors,GAP,library,linear regression,ML,MLP,MLP comparison,original publication,qSNAP,SNAP,UF3,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/Xie et al_2023_Ultra-fast interpretable machine-learning potentials.pdf} +} + @article{xuSurveyMultiOutputLearning2020, title = {Survey on {{Multi-Output Learning}}}, author = {Xu, Donna and Shi, Yaxin and Tsang, Ivor W. and Ong, Yew-Soon and Gong, Chen and Shen, Xiaobo}, @@ -11775,6 +15824,26 @@ file = {/Users/wasmer/Zotero/storage/G9EKXIZ4/Yanagi et al. - 2023 - Generation of modulated magnetic structures based .pdf;/Users/wasmer/Zotero/storage/ILBXKEJP/PhysRevB.107.html} } +@article{yangDecipheringChemicalOrder2017, + title = {Deciphering Chemical Order/Disorder and Material Properties at the Single-Atom Level}, + author = {Yang, Yongsoo and Chen, Chien-Chun and Scott, M. C. and Ophus, Colin and Xu, Rui and Pryor, Alan and Wu, Li and Sun, Fan and Theis, Wolfgang and Zhou, Jihan and Eisenbach, Markus and Kent, Paul R. C. and Sabirianov, Renat F. 
and Zeng, Hao and Ercius, Peter and Miao, Jianwei}, + date = {2017-02}, + journaltitle = {Nature}, + volume = {542}, + number = {7639}, + pages = {75--79}, + publisher = {{Nature Publishing Group}}, + issn = {1476-4687}, + doi = {10.1038/nature21042}, + url = {https://www.nature.com/articles/nature21042}, + urldate = {2023-09-19}, + abstract = {The three-dimensional coordinates of more than 23,000 atoms in an iron-platinum nanoparticle are determined with 22 picometre precision to correlate chemical order/disorder and crystal defects with magnetic properties.}, + issue = {7639}, + langid = {english}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Yang et al_2017_Deciphering chemical order-disorder and material properties at the single-atom.pdf} +} + @article{yangMachinelearningAcceleratedGeometry2021, title = {Machine-Learning Accelerated Geometry Optimization in Molecular Simulation}, author = {Yang, Yilin and Jiménez-Negrón, Omar A. and Kitchin, John R.}, @@ -11794,6 +15863,91 @@ file = {/home/johannes/Nextcloud/Zotero/false;/Users/wasmer/Zotero/storage/2L5JFJN8/5.html} } +@article{yaoTensorMol0ModelChemistry2018, + title = {The {{TensorMol-0}}.1 Model Chemistry: A Neural Network Augmented with Long-Range Physics}, + shorttitle = {The {{TensorMol-0}}.1 Model Chemistry}, + author = {Yao, Kun and E.~Herr, John and W.~Toth, David and Mckintyre, Ryker and Parkhill, John}, + date = {2018}, + journaltitle = {Chemical Science}, + volume = {9}, + number = {8}, + pages = {2261--2269}, + publisher = {{Royal Society of Chemistry}}, + doi = {10.1039/C7SC04934J}, + url = {https://pubs.rsc.org/en/content/articlelanding/2018/sc/c7sc04934j}, + urldate = {2023-08-24}, + langid = {english}, + keywords = {/unread,AML,chemistry,library,long-range interaction,ML,TensorFlow,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Yao et al_2018_The TensorMol-0.pdf} +} + +@online{yuCapturingLongrangeInteraction2022, + title = {Capturing Long-Range 
Interaction with Reciprocal Space Neural Network}, + author = {Yu, Hongyu and Hong, Liangliang and Chen, Shiyou and Gong, Xingao and Xiang, Hongjun}, + date = {2022-11-29}, + eprint = {2211.16684}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2211.16684}, + url = {http://arxiv.org/abs/2211.16684}, + urldate = {2023-10-13}, + abstract = {Machine Learning (ML) interatomic models and potentials have been widely employed in simulations of materials. Long-range interactions often dominate in some ionic systems whose dynamics behavior is significantly influenced. However, the long-range effect such as Coulomb and Van der Wales potential is not considered in most ML interatomic potentials. To address this issue, we put forward a method that can take long-range effects into account for most ML local interatomic models with the reciprocal space neural network. The structure information in real space is firstly transformed into reciprocal space and then encoded into a reciprocal space potential or a global descriptor with full atomic interactions. The reciprocal space potential and descriptor keep full invariance of Euclidean symmetry and choice of the cell. Benefiting from the reciprocal-space information, ML interatomic models can be extended to describe the long-range potential including not only Coulomb but any other long-range interaction. A model NaCl system considering Coulomb interaction and the GaxNy system with defects are applied to illustrate the advantage of our approach. At the same time, our approach helps to improve the prediction accuracy of some global properties such as the band gap where the full atomic interaction beyond local atomic environments plays a very important role. 
In summary, our work has expanded the ability of current ML interatomic models and potentials when dealing with the long-range effect, hence paving a new way for accurate prediction of global properties and large-scale dynamic simulations of systems with defects.}, + pubstate = {preprint}, + keywords = {AML,CGCNN,charge transfer,descriptors,DimeNet,E(3),invariance,long-range interaction,materials,ML,MLP,PAiNN,reciprocal space,reciprocal space descriptor,vdW}, + file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2022_Capturing long-range interaction with reciprocal space neural network.pdf;/Users/wasmer/Zotero/storage/KHNJJ4VF/2211.html} +} + +@online{yuEfficientEquivariantGraph2023, + title = {Efficient and {{Equivariant Graph Networks}} for {{Predicting Quantum Hamiltonian}}}, + author = {Yu, Haiyang and Xu, Zhao and Qian, Xiaofeng and Qian, Xiaoning and Ji, Shuiwang}, + date = {2023-06-07}, + eprint = {2306.04922}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2306.04922}, + url = {http://arxiv.org/abs/2306.04922}, + urldate = {2023-10-13}, + abstract = {We consider the prediction of the Hamiltonian matrix, which finds use in quantum chemistry and condensed matter physics. Efficiency and equivariance are two important, but conflicting factors. In this work, we propose a SE(3)-equivariant network, named QHNet, that achieves efficiency and equivariance. Our key advance lies at the innovative design of QHNet architecture, which not only obeys the underlying symmetries, but also enables the reduction of number of tensor products by 92\textbackslash\%. In addition, QHNet prevents the exponential growth of channel dimension when more atom types are involved. We perform experiments on MD17 datasets, including four molecular systems. Experimental results show that our QHNet can achieve comparable performance to the state of the art methods at a significantly faster speed. 
Besides, our QHNet consumes 50\textbackslash\% less memory due to its streamlined architecture. Our code is publicly available as part of the AIRS library (\textbackslash url\{https://github.com/divelab/AIRS\}).},
+  pubstate = {preprint},
+  keywords = {ablation study,AML,chemical species scaling problem,DFT,e3nn,emulator,equivariant,GNN,hybrid AI/simulation,library,ML,ML-DFT,ML-ESM,molecules,MPNN,PBE,PhiSNet,prediction of Hamiltonian matrix,PySCF,QHNet,quantum tensors,SE(3),with-code},
+  file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2023_Efficient and Equivariant Graph Networks for Predicting Quantum Hamiltonian.pdf;/Users/wasmer/Zotero/storage/8MAS3EXH/2306.html}
+}
+
+@article{yuELSIUnifiedSoftware2018,
+  title = {{{ELSI}}: {{A}} Unified Software Interface for {{Kohn}}–{{Sham}} Electronic Structure Solvers},
+  shorttitle = {{{ELSI}}},
+  author = {Yu, Victor Wen-zhe and Corsetti, Fabiano and García, Alberto and Huhn, William P. and Jacquelin, Mathias and Jia, Weile and Lange, Björn and Lin, Lin and Lu, Jianfeng and Mi, Wenhui and Seifitokaldani, Ali and Vázquez-Mayagoitia, Álvaro and Yang, Chao and Yang, Haizhao and Blum, Volker},
+  date = {2018-01-01},
+  journaltitle = {Computer Physics Communications},
+  shortjournal = {Computer Physics Communications},
+  volume = {222},
+  pages = {267--285},
+  issn = {0010-4655},
+  doi = {10.1016/j.cpc.2017.09.007},
+  url = {https://www.sciencedirect.com/science/article/pii/S0010465517302941},
+  urldate = {2023-09-24},
+  abstract = {Solving the electronic structure from a generalized or standard eigenproblem is often the bottleneck in large scale calculations based on Kohn–Sham density-functional theory. This problem must be addressed by essentially all current electronic structure codes, based on similar matrix expressions, and by high-performance computation. We here present a unified software interface, ELSI, to access different strategies that address the Kohn–Sham eigenvalue problem. 
Currently supported algorithms include the dense generalized eigensolver library ELPA, the orbital minimization method implemented in libOMM, and the pole expansion and selected inversion (PEXSI) approach with lower computational complexity for semilocal density functionals. The ELSI interface aims to simplify the implementation and optimal use of the different strategies, by offering (a) a unified software framework designed for the electronic structure solvers in Kohn–Sham density-functional theory; (b) reasonable default parameters for a chosen solver; (c) automatic conversion between input and internal working matrix formats, and in the future (d) recommendation of the optimal solver depending on the specific problem. Comparative benchmarks are shown for system sizes up to 11,520 atoms (172,800 basis functions) on distributed memory supercomputing architectures. Program summary Program title: ELSI Interface Program Files doi: http://dx.doi.org/10.17632/y8vzhzdm62.1 Licensing provisions: BSD 3-clause Programming language: Fortran 2003, with interface to C/C++ External routines/libraries: MPI, BLAS, LAPACK, ScaLAPACK, ELPA, libOMM, PEXSI, ParMETIS, SuperLU\_DIST Nature of problem: Solving the electronic structure from a generalized or standard eigenvalue problem in calculations based on Kohn–Sham density functional theory (KS-DFT). Solution method: To connect the KS-DFT codes and the KS electronic structure solvers, ELSI provides a unified software interface with reasonable default parameters, hierarchical control over the interface and the solvers, and automatic conversions between input and internal working matrix formats. Supported solvers are: ELPA (dense generalized eigensolver), libOMM (orbital minimization method), and PEXSI (pole expansion and selected inversion method). 
Restrictions: The ELSI interface requires complete information of the Hamiltonian matrix.}, + keywords = {/unread,Density-functional theory,Kohn–Sham eigenvalue problem,Parallel computing}, + file = {/Users/wasmer/Zotero/storage/ZHVDM662/Yu et al. - 2018 - ELSI A unified software interface for Kohn–Sham e.pdf;/Users/wasmer/Zotero/storage/RFWBX7DN/S0010465517302941.html} +} + +@online{yuQH9QuantumHamiltonian2023, + title = {{{QH9}}: {{A Quantum Hamiltonian Prediction Benchmark}} for {{QM9 Molecules}}}, + shorttitle = {{{QH9}}}, + author = {Yu, Haiyang and Liu, Meng and Luo, Youzhi and Strasser, Alex and Qian, Xiaofeng and Qian, Xiaoning and Ji, Shuiwang}, + date = {2023-06-15}, + eprint = {2306.09549}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2306.09549}, + url = {http://arxiv.org/abs/2306.09549}, + urldate = {2023-10-13}, + abstract = {Supervised machine learning approaches have been increasingly used in accelerating electronic structure prediction as surrogates of first-principle computational methods, such as density functional theory (DFT). While numerous quantum chemistry datasets focus on chemical properties and atomic forces, the ability to achieve accurate and efficient prediction of the Hamiltonian matrix is highly desired, as it is the most important and fundamental physical quantity that determines the quantum states of physical systems and chemical properties. In this work, we generate a new Quantum Hamiltonian dataset, named as QH9, to provide precise Hamiltonian matrices for 2,399 molecular dynamics trajectories and 130,831 stable molecular geometries, based on the QM9 dataset. By designing benchmark tasks with various molecules, we show that current machine learning models have the capacity to predict Hamiltonian matrices for arbitrary molecules. 
Both the QH9 dataset and the baseline models are provided to the community through an open-source benchmark, which can be highly valuable for developing machine learning methods and accelerating molecular and materials design for scientific and technological applications. Our benchmark is publicly available at https://github.com/divelab/AIRS/tree/main/OpenDFT/QHBench.}, + pubstate = {preprint}, + keywords = {AML,benchmark dataset,benchmarking,Database,dataset,DeepH,DFT speedup,DFT speedup with ML,equivariant,ML,ML-DFT,ML-ESM,molecules,out-of-distribution,prediction of energy,prediction of Hamiltonian matrix,prediction of wavefunction,QHNet,QM9,SchNOrb,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2023_QH9.pdf;/Users/wasmer/Zotero/storage/LHYKLK48/2306.html} +} + @online{yuSpinDependentGraphNeural2023, title = {Spin-{{Dependent Graph Neural Network Potential}} for {{Magnetic Materials}}}, author = {Yu, Hongyu and Zhong, Yang and Hong, Liangliang and Xu, Changsong and Ren, Wei and Gong, Xingao and Xiang, Hongjun}, @@ -11803,10 +15957,45 @@ urldate = {2023-06-12}, abstract = {The development of machine learning interatomic potentials has immensely contributed to the accuracy of simulations of molecules and crystals. 
However, creating interatomic potentials for magnetic systems that account for both magnetic moments and structural degrees of freedom remains a challe...}, langid = {english}, - keywords = {\_tablet,Allegro,AML,collinear,DimeNet,equivariant,GNN,HDNNP,heat transport,Heisenberg model,Jij,LAMMPS,LAMMPS SPIN,magnetism,MD,mHDNNP,ML,MLP,mMTP,MTP,multiferroic,non-collinear,original publication,PES,prediction of Jij,prediction of magnetic ground state,spin dynamics,Spin-Allegro,spin-dependent,Spin-Dimenet,spin-lattice coupling,SpinGNN,with-code}, + pubstate = {preprint}, + keywords = {\_tablet,Allegro,AML,collinear,DimeNet,equivariant,GNN,HDNNP,heat transport,Heisenberg model,Jij,LAMMPS,LAMMPS SPIN,magnetism,MD,mHDNNP,ML,MLP,mMTP,MTP,multiferroic,non-collinear,original publication,PES,prediction of Jij,prediction of magnetic ground state,skyrmions,spin dynamics,Spin-Allegro,spin-dependent,Spin-Dimenet,spin-lattice coupling,SpinGNN,with-code}, file = {/Users/wasmer/Nextcloud/Zotero/Yu et al_2023_Spin-Dependent Graph Neural Network Potential for Magnetic Materials.pdf} } +@online{yuSpinDependentGraphNeural2023a, + title = {Spin-{{Dependent Graph Neural Network Potential}} for {{Magnetic Materials}}}, + author = {Yu, Hongyu and Zhong, Yang and Hong, Liangliang and Xu, Changsong and Ren, Wei and Gong, Xingao and Xiang, Hongjun}, + date = {2023-04-20}, + eprint = {2203.02853}, + eprinttype = {arxiv}, + eprintclass = {cond-mat, physics:physics}, + doi = {10.48550/arXiv.2203.02853}, + url = {http://arxiv.org/abs/2203.02853}, + urldate = {2023-10-14}, + abstract = {The development of machine learning interatomic potentials has immensely contributed to the accuracy of simulations of molecules and crystals. However, creating interatomic potentials for magnetic systems that account for both magnetic moments and structural degrees of freedom remains a challenge. 
This work introduces SpinGNN, a spin-dependent interatomic potential approach that employs the graph neural network (GNN) to describe magnetic systems. SpinGNN consists of two types of edge GNNs: Heisenberg edge GNN (HEGNN) and spin-distance edge GNN (SEGNN). HEGNN is tailored to capture Heisenberg-type spin-lattice interactions, while SEGNN accurately models multi-body and high-order spin-lattice coupling. The effectiveness of SpinGNN is demonstrated by its exceptional precision in fitting a high-order spin Hamiltonian and two complex spin-lattice Hamiltonians with great precision. Furthermore, it successfully models the subtle spin-lattice coupling in BiFeO3 and performs large-scale spin-lattice dynamics simulations, predicting its antiferromagnetic ground state, magnetic phase transition, and domain wall energy landscape with high accuracy. Our study broadens the scope of graph neural network potentials to magnetic systems, serving as a foundation for carrying out large-scale spin-lattice dynamic simulations of such systems.}, + pubstate = {preprint}, + keywords = {\_tablet,Allegro,AML,collinear,Computer Science - Machine Learning,Condensed Matter - Disordered Systems and Neural Networks,DimeNet,equivariant,GNN,HDNNP,heat transport,Heisenberg model,Jij,LAMMPS,LAMMPS SPIN,magnetism,MD,mHDNNP,ML,MLP,mMTP,MTP,multiferroic,non-collinear,original publication,PES,Physics - Computational Physics,prediction of Jij,prediction of magnetic ground state,skyrmions,spin dynamics,Spin-Allegro,spin-dependent,Spin-Dimenet,spin-lattice coupling,SpinGNN,with-code}, + file = {/Users/wasmer/Nextcloud/Zotero/false;/Users/wasmer/Zotero/storage/B3HN773D/2203.html} +} + +@book{zabloudilElectronScatteringSolid2005, + title = {Electron {{Scattering}} in {{Solid Matter}}}, + editor = {Zabloudil, Jan and Hammerling, Robert and Weinberger, Peter and Szunyogh, Laszlo}, + editorb = {Cardona, Manuel and Fulde, Peter and Von Klitzing, Klaus and Queisser, Hans-Joachim and Merlin, Roberto and 
Störmer, Horst}, + editorbtype = {redactor}, + date = {2005}, + series = {Springer {{Series}} in {{Solid-State Sciences}}}, + volume = {147}, + publisher = {{Springer}}, + location = {{Berlin, Heidelberg}}, + doi = {10.1007/b138290}, + url = {http://link.springer.com/10.1007/b138290}, + urldate = {2023-09-19}, + isbn = {978-3-540-22524-9 978-3-540-27001-0}, + keywords = {Green's functions,Helium-Atom-Streuung,Magnetism,Nanostructures,Relativistic electron theory,Scattering theory,STEM}, + file = {/Users/wasmer/Nextcloud/Zotero/Zabloudil et al_2005_Electron Scattering in Solid Matter.pdf} +} + @online{zachglickDoesItFeel2021, type = {Tweet}, title = {Does It Feel like Everyone in \#compchem Is Doing Machine Learning Now? {{I}} Thought so after \#{{ACSSpring2021}}, and Decided to Look at How Frequently Certain Phrases Appeared in {{COMP}} Division Abstracts over the Years: {{https://t.co/F4awnzVebs}}}, @@ -11847,6 +16036,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zaverkin et al_2021_Fast and Sample-Efficient Interatomic Neural Network Potentials for Molecules.pdf} } +@online{zeerPromotingPbasedHall2023, + title = {Promoting P-Based {{Hall}} Effects by p-d-f Hybridization in {{Gd-based}} Dichalcogenides}, + author = {Zeer, Mahmoud and Go, Dongwook and Schmitz, Peter and Saunderson, Tom G. and Wang, Hao and Ghabboun, Jamal and Blügel, Stefan and Wulfhekel, Wulf and Mokrousov, Yuriy}, + date = {2023-08-16}, + eprint = {2308.08207}, + eprinttype = {arxiv}, + eprintclass = {cond-mat}, + doi = {10.48550/arXiv.2308.08207}, + url = {http://arxiv.org/abs/2308.08207}, + urldate = {2023-10-04}, + abstract = {We conduct a first-principles study of Hall effects in rare-earth dichalcogenides, focusing on monolayers of the H-phase EuX\$\_2\$ and GdX\$\_2\$, where X = S, Se, and Te. Our predictions reveal that all EuX\$\_2\$ and GdX\$\_2\$ systems exhibit high magnetic moments and wide bandgaps. 
We observe that while in case of EuX\$\_2\$ the \$p\$ and \$f\$ states hybridize directly below the Fermi energy, the absence of \$f\$ and \$d\$ states of Gd at the Fermi energy results in \$p\$-like spin-polarized electronic structure of GdX\$\_2\$, which mediates \$p\$-based magnetotransport. Notably, these systems display significant anomalous, spin, and orbital Hall conductivities. We find that in GdX\$\_2\$ the strength of correlations controls the relative position of \$p\$, \$d\$ and \$f\$-states and their hybridization which has a crucial impact on \$p\$-state polarization and the anomalous Hall effect, but not the spin and orbital Hall effect. Moreover, we find that the application of strain can significantly modify the electronic structure of the monolayers, resulting in quantized charge, spin and orbital transport in GdTe\$\_2\$ via a strain-mediated orbital inversion mechanism taking place at the Fermi energy. Our findings suggest that rare-earth dichalcogenides hold promise as a platform for topological spintronics and orbitronics.}, + pubstate = {preprint}, + keywords = {chalcogenides,DFT,DFT+U,FLEUR,FZJ,Hall effect,Hall OHE,PGI,PGI-1/IAS-1,physics,quantum materials,rare earths,SOC,Wannier}, + file = {/Users/wasmer/Nextcloud/Zotero/Zeer et al_2023_Promoting $p$-based Hall effects by $p$-$d$-$f$ hybridization in Gd-based.pdf;/Users/wasmer/Zotero/storage/N7TZNGEI/2308.html} +} + @article{zeledonStructuralInformationFiltered2020, title = {The Structural Information Filtered Features ({{SIFF}}) Potential: {{Maximizing}} Information Stored in Machine-Learning Descriptors for Materials Prediction}, shorttitle = {The Structural Information Filtered Features ({{SIFF}}) Potential}, @@ -11884,6 +16089,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zeller_2012_Correlated electrons.pdf;/Users/wasmer/Zotero/storage/BKBRXSWN/136393.html} } +@book{zengQuantumInformationMeets2019, + title = {Quantum {{Information Meets Quantum Matter}}: {{From Quantum Entanglement}} to 
{{Topological Phases}} of {{Many-Body Systems}}}, + shorttitle = {Quantum {{Information Meets Quantum Matter}}}, + author = {Zeng, Bei and Chen, Xie and Zhou, Duan-Lu and Wen, Xiao-Gang}, + date = {2019}, + series = {Quantum {{Science}} and {{Technology}}}, + publisher = {{Springer}}, + location = {{New York, NY}}, + doi = {10.1007/978-1-4939-9084-9}, + url = {http://link.springer.com/10.1007/978-1-4939-9084-9}, + urldate = {2023-08-24}, + isbn = {978-1-4939-9082-5 978-1-4939-9084-9}, + langid = {english}, + keywords = {boson/spin liquids,educational,entanglement in many-body systems,gapped quantum phases,Hall effect,Hall FQHE,Ising,learning material,quantum entanglement,quantum information,quantum information and condensed matter physics,quantum many-body physics,quantum order,spin,symmetry,symmetry breaking,tensor network,todo-tagging,topological,topological order,TRS}, + file = {/Users/wasmer/Nextcloud/Zotero/Zeng et al_2019_Quantum Information Meets Quantum Matter.pdf} +} + @article{zeniCompactAtomicDescriptors2021, title = {Compact Atomic Descriptors Enable Accurate Predictions via Linear Models}, author = {Zeni, Claudio and Rossi, Kevin and Glielmo, Aldo and family=Gironcoli, given=Stefano, prefix=de, useprefix=true}, @@ -11903,6 +16125,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zeni et al_2021_Compact atomic descriptors enable accurate predictions via linear models.pdf} } +@online{zenilFutureFundamentalScience2023, + title = {The {{Future}} of {{Fundamental Science Led}} by {{Generative Closed-Loop Artificial Intelligence}}}, + author = {Zenil, Hector and Tegnér, Jesper and Abrahão, Felipe S. and Lavin, Alexander and Kumar, Vipin and Frey, Jeremy G. and Weller, Adrian and Soldatova, Larisa and Bundy, Alan R. and Jennings, Nicholas R. and Takahashi, Koichi and Hunter, Lawrence and Dzeroski, Saso and Briggs, Andrew and Gregory, Frederick D. and Gomes, Carla P. and Williams, Christopher K. I. 
and Rowe, Jon and Evans, James and Kitano, Hiroaki and Tenenbaum, Joshua B. and King, Ross}, + date = {2023-08-14}, + eprint = {2307.07522}, + eprinttype = {arxiv}, + eprintclass = {cs}, + doi = {10.48550/arXiv.2307.07522}, + url = {http://arxiv.org/abs/2307.07522}, + urldate = {2023-08-22}, + abstract = {Recent advances in machine learning and AI, including Generative AI and LLMs, are disrupting technological innovation, product development, and society as a whole. AI's contribution to technology can come from multiple approaches that require access to large training data sets and clear performance evaluation criteria, ranging from pattern recognition and classification to generative models. Yet, AI has contributed less to fundamental science in part because large data sets of high-quality data for scientific practice and model discovery are more difficult to access. Generative AI, in general, and Large Language Models in particular, may represent an opportunity to augment and accelerate the scientific discovery of fundamental deep science with quantitative models. Here we explore and investigate aspects of an AI-driven, automated, closed-loop approach to scientific discovery, including self-driven hypothesis generation and open-ended autonomous exploration of the hypothesis space. Integrating AI-driven automation into the practice of science would mitigate current problems, including the replication of findings, systematic production of data, and ultimately democratisation of the scientific process. Realising these possibilities requires a vision for augmented AI coupled with a diversity of AI approaches able to deal with fundamental aspects of causality analysis and model discovery while enabling unbiased search across the space of putative explanations. These advances hold the promise to unleash AI's potential for searching and discovering the fundamental structure of our world beyond what human scientists have been able to achieve. 
Such a vision would push the boundaries of new fundamental science rather than automatize current workflows and instead open doors for technological innovation to tackle some of the greatest challenges facing humanity today.}, + pubstate = {preprint}, + keywords = {/unread,Pasteur \& ISI,todo-tagging}, + file = {/Users/wasmer/Zotero/storage/RXPDN4KU/Zenil et al. - 2023 - The Future of Fundamental Science Led by Generativ.pdf;/Users/wasmer/Zotero/storage/FRVYJZDW/2307.html} +} + @article{zepeda-nunezDeepDensityCircumventing2021, title = {Deep {{Density}}: {{Circumventing}} the {{Kohn-Sham}} Equations via Symmetry Preserving Neural Networks}, shorttitle = {Deep {{Density}}}, @@ -11922,6 +16160,22 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zepeda-Núñez et al_2021_Deep Density.pdf;/Users/wasmer/Zotero/storage/TJJ4NCEI/S0021999121004186.html} } +@online{zhangArtificialIntelligenceScience2023, + title = {Artificial {{Intelligence}} for {{Science}} in {{Quantum}}, {{Atomistic}}, and {{Continuum Systems}}}, + author = {Zhang, Xuan and Wang, Limei and Helwig, Jacob and Luo, Youzhi and Fu, Cong and Xie, Yaochen and Liu, Meng and Lin, Yuchao and Xu, Zhao and Yan, Keqiang and Adams, Keir and Weiler, Maurice and Li, Xiner and Fu, Tianfan and Wang, Yucheng and Yu, Haiyang and Xie, YuQing and Fu, Xiang and Strasser, Alex and Xu, Shenglong and Liu, Yi and Du, Yuanqi and Saxton, Alexandra and Ling, Hongyi and Lawrence, Hannah and Stärk, Hannes and Gui, Shurui and Edwards, Carl and Gao, Nicholas and Ladera, Adriana and Wu, Tailin and Hofgard, Elyssa F. and Tehrani, Aria Mansouri and Wang, Rui and Daigavane, Ameya and Bohde, Montgomery and Kurtin, Jerry and Huang, Qian and Phung, Tuong and Xu, Minkai and Joshi, Chaitanya K. and Mathis, Simon V. 
and Azizzadenesheli, Kamyar and Fang, Ada and Aspuru-Guzik, Alán and Bekkers, Erik and Bronstein, Michael and Zitnik, Marinka and Anandkumar, Anima and Ermon, Stefano and Liò, Pietro and Yu, Rose and Günnemann, Stephan and Leskovec, Jure and Ji, Heng and Sun, Jimeng and Barzilay, Regina and Jaakkola, Tommi and Coley, Connor W. and Qian, Xiaoning and Qian, Xiaofeng and Smidt, Tess and Ji, Shuiwang}, + date = {2023-07-17}, + eprint = {2307.08423}, + eprinttype = {arxiv}, + eprintclass = {physics}, + doi = {10.48550/arXiv.2307.08423}, + url = {http://arxiv.org/abs/2307.08423}, + urldate = {2023-07-24}, + abstract = {Advances in artificial intelligence (AI) are fueling a new paradigm of discoveries in natural sciences. Today, AI has started to advance natural sciences by improving, accelerating, and enabling our understanding of natural phenomena at a wide range of spatial and temporal scales, giving rise to a new area of research known as AI for science (AI4Science). Being an emerging research paradigm, AI4Science is unique in that it is an enormous and highly interdisciplinary area. Thus, a unified and technical treatment of this field is needed yet challenging. This paper aims to provide a technically thorough account of a subarea of AI4Science; namely, AI for quantum, atomistic, and continuum systems. These areas aim at understanding the physical world from the subatomic (wavefunctions and electron density), atomic (molecules, proteins, materials, and interactions), to macro (fluids, climate, and subsurface) scales and form an important subarea of AI4Science. A unique advantage of focusing on these areas is that they largely share a common set of challenges, thereby allowing a unified and foundational treatment. A key common challenge is how to capture physics first principles, especially symmetries, in natural systems by deep learning methods. We provide an in-depth yet intuitive account of techniques to achieve equivariance to symmetry transformations. 
We also discuss other common technical challenges, including explainability, out-of-distribution generalization, knowledge transfer with foundation and large language models, and uncertainty quantification. To facilitate learning and education, we provide categorized lists of resources that we found to be useful. We strive to be thorough and unified and hope this initial effort may trigger more community interests and efforts to further advance AI4Science.}, + pubstate = {preprint}, + keywords = {\_tablet,ACE,AI4Science,ALIGNN,Allegro,AlphaFold,AML,benchmarking,body-order,CCSD(T),CGCNN,chemistry,Database,DeepH,DFT,DimeNet,drug discovery,E(3),education,EGNN,equivariant,FermiNet,foundation models,G-SchNet,GemNet,generative models,GNN,graph ML,invariance,learning material,library,lists,LLM,M3GNet,MACE,magnetism,MatBench,materials discovery,materials project,MD,MD17,MEGNet,Microsoft Research,ML,ML-DFA,ML-DFT,ML-ESM,ML-FF,ML-QMBP,MLP,model comparison,model taxonomy,molecules,MPNN,NequIP,NQS,OC20,OF-DFT,open questions,out-of-distribution,PAiNN,PauliNet,PDE,PhiSNet,phonon,physics,QM7,QM9,representation learning,resources list,review,review-of-AI4science,review-of-AML,review-of-ML-DFT,roadmap,SchNet,SchNOrb,SE(3),SOTA,SphereNet,spin-dependent,SSL,symmetry,uncertainty quantification,with-code,XAI}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2023_Artificial Intelligence for Science in Quantum, Atomistic, and Continuum Systems.pdf;/Users/wasmer/Zotero/storage/PSVYKZKY/2307.html} +} + @article{zhangCrossoverThreeDimensionalTopological2010, title = {Crossover of {{Three-Dimensional Topological Insulator}} of {{Bi2Se3}} to the {{Two-Dimensional Limit}}}, author = {Zhang, Yi and He, Ke and Chang, Cui-Zu and Song, Can-Li and Wang, Lili and Chen, Xi and Jia, Jinfeng and Fang, Zhong and Dai, Xi and Shan, Wen-Yu and Shen, Shun-Qing and Niu, Qian and Qi, Xiaoliang and Zhang, Shou-Cheng and Ma, Xucun and Xue, Qi-Kun}, @@ -11964,6 +16218,24 @@ file = 
{/Users/wasmer/Nextcloud/Zotero/Zhang et al_2010_Crossover of the three-dimensional topological insulator Bi2Se3 to the.pdf}
 }
 
+@article{zhangCurrentdrivenMagnetoresistanceVan2023,
+  title = {Current-driven magnetoresistance in van der {{Waals}} spin-filter antiferromagnetic tunnel junctions with {$\mathrm{MnBi_2Te_4}$}},
+  author = {Zhang, Lishu and Li, Hui and Jiang, Yanyan and Wang, Zishen and Li, Tao and Ghosh, Sumit},
+  date = {2023-10-20},
+  journaltitle = {Physical Review Applied},
+  shortjournal = {Phys. Rev. Appl.},
+  volume = {20},
+  number = {4},
+  pages = {044056},
+  publisher = {{American Physical Society}},
+  doi = {10.1103/PhysRevApplied.20.044056},
+  url = {https://link.aps.org/doi/10.1103/PhysRevApplied.20.044056},
+  urldate = {2023-11-05},
+  abstract = {The field of two-dimensional magnetic materials has paved the way for the development of spintronics and nanodevices with other functionalities. Utilizing antiferromagnetic materials, in addition to layered van der Waals (vdW) ferromagnetic materials, has garnered significant interest. In this work, we present a theoretical investigation of the behavior of MnBi2Te4 devices based on the nonequilibrium Green’s function method. Our results show that the current-voltage (I-V) characteristics can be influenced significantly by controlling the length of the device and bias voltage and thus allow us to manipulate the tunneling magnetoresistance (TMR) with an external bias voltage. This can be further influenced by the presence of the boron nitride layer, which shows significantly enhanced TMR by selectively suppressing specific spin channels for different magnetic configurations. 
By exploiting this mechanism, the observed TMR value reaches up to 3690\%, which can be attributed to the spin-polarized transmission channel and the projected local density of states. Our findings on the influence of structural and magnetic configurations on the spin-polarized transport properties and TMR ratios give the potential implementation of antiferromagnetic vdW layered materials in ultrathin spintronics.}, + keywords = {AFM,FZJ,magnetism,magnetoresistance,PGI,PGI-1/IAS-1,spintronics,TMR,vdW materials}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2023_Current-driven magnetoresistance in van der Waals spin-filter antiferromagnetic.pdf;/Users/wasmer/Zotero/storage/CQGUSP89/PhysRevApplied.20.html} +} + @article{zhangDeepPotentialMolecular2018, title = {Deep {{Potential Molecular Dynamics}}: {{A Scalable Model}} with the {{Accuracy}} of {{Quantum Mechanics}}}, shorttitle = {Deep {{Potential Molecular Dynamics}}}, @@ -11997,6 +16269,24 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zhang_2021_High-throughput design of magnetic materials.pdf} } +@article{zhangLocallySelfconsistentEmbedding2019, + title = {Locally Self-Consistent Embedding Approach for Disordered Electronic Systems}, + author = {Zhang, Yi and Terletska, Hanna and Tam, Ka-Ming and Wang, Yang and Eisenbach, Markus and Chioncel, Liviu and Jarrell, Mark}, + date = {2019-08-28}, + journaltitle = {Physical Review B}, + shortjournal = {Phys. Rev. B}, + volume = {100}, + number = {5}, + pages = {054205}, + publisher = {{American Physical Society}}, + doi = {10.1103/PhysRevB.100.054205}, + url = {https://link.aps.org/doi/10.1103/PhysRevB.100.054205}, + urldate = {2023-09-19}, + abstract = {We present an embedding scheme for the locally self-consistent method to study disordered electron systems. We test this method in a tight-binding basis and apply it to the single band Anderson model. 
The local interaction zone is used to efficiently compute the local Green's function of a supercell embedded into a local typical medium. We find a quick convergence as the size of the local interaction zone which reduces the computational costs as expected. This method captures the Anderson localization transition and accurately predicts the critical disorder strength. The present work opens the path towards the development of a typical medium embedding scheme for the O(N) multiple scattering methods.}, + keywords = {/unread,todo-tagging}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2019_Locally self-consistent embedding approach for disordered electronic systems.pdf} +} + @article{zhangMachineLearningMathbbZ2017, title = {Machine Learning \$\{\textbackslash mathbb\{\vphantom{\}\}}{{Z}}\vphantom\{\}\vphantom\{\}\_\{2\}\$ Quantum Spin Liquids with Quasiparticle Statistics}, author = {Zhang, Yi and Melko, Roger G. and Kim, Eun-Ah}, @@ -12068,6 +16358,23 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zhang_Kim_2017_Quantum Loop Topography for Machine Learning.pdf;/Users/wasmer/Zotero/storage/M9VFL53W/PhysRevLett.118.html} } +@article{zhangRobustDatadrivenApproach2020, + title = {Robust Data-Driven Approach for Predicting the Configurational Energy of High Entropy Alloys}, + author = {Zhang, Jiaxin and Liu, Xianglin and Bi, Sirui and Yin, Junqi and Zhang, Guannan and Eisenbach, Markus}, + date = {2020-01-05}, + journaltitle = {Materials \& Design}, + shortjournal = {Materials \& Design}, + volume = {185}, + pages = {108247}, + issn = {0264-1275}, + doi = {10.1016/j.matdes.2019.108247}, + url = {https://www.sciencedirect.com/science/article/pii/S0264127519306859}, + urldate = {2023-09-19}, + abstract = {High entropy alloys (HEAs) are promising next-generation materials due to their various excellent properties. 
To understand these properties, it's necessary to characterize the chemical ordering and identify order-disorder transitions through efficient simulation and modeling of thermodynamics. In this study, a robust data-driven framework based on Bayesian approaches is proposed for the accurate and efficient prediction of configurational energy of high entropy alloys. The recently proposed effective pair interaction (EPI) model with ensemble sampling is used to map the configuration and its corresponding energy. Given limited data calculated by first-principles calculations, Bayesian regularized regression not only offers an accurate and stable prediction but also effectively quantifies the uncertainties associated with EPI parameters. Compared with the arbitrary truncation of model complexity, we further conduct a physical feature selection to identify the truncation of coordination shells in EPI model using Bayesian information criterion. The results achieve efficient and robust performance in predicting the configurational energy, particularly given small data. The developed methodology is applied to study a series of refractory HEAs, i.e. NbMoTaW, NbMoTaWV and NbMoTaWTi where it is demonstrated how dataset size affects the confidence when data is sparse.}, + keywords = {/unread,active learning,AML,HEA,ML,todo-tagging,uncertainty quantification}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhang et al_2020_Robust data-driven approach for predicting the configurational energy of high.pdf} +} + @article{zhangStrategyApplyMachine2018, title = {A Strategy to Apply Machine Learning to Small Datasets in Materials Science}, author = {Zhang, Ying and Ling, Chen}, @@ -12135,10 +16442,47 @@ urldate = {2023-06-12}, abstract = {Complex spin-spin interactions in magnets can often lead to magnetic superlattices with complex local magnetic arrangements, and many of the magnetic superlattices have been found to possess non-trivial topological electronic properties. 
Due to the huge size and complex magnetic moment arrangement of the magnetic superlattices, it is a great challenge to perform a direct DFT calculation on them. In this work, an equivariant deep learning framework is designed to accelerate the electronic calculation of magnetic systems by exploiting both the equivariant constraints of the magnetic Hamiltonian matrix and the physical rules of spin-spin interactions. This framework can bypass the costly self-consistent iterations and build a direct mapping from a magnetic configuration to the ab initio Hamiltonian matrix. After training on the magnets with random magnetic configurations, our model achieved high accuracy on the test structures outside the training set, such as spin spiral and non-collinear antiferromagnetic configurations. The trained model is also used to predict the energy bands of a skyrmion configuration of NiBrI containing thousands of atoms, showing the high efficiency of our model on large magnetic superlattices.}, pubstate = {preprint}, - keywords = {\_tablet,2D material,AFM,AML,DFT,Dzyaloshinskii–Moriya interaction,E(3),equivariant,GNN,Hall effect,Heisenberg model,iron,Jij,magnetic Hamiltonian,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,non-collinear,OpenMX,prediction from magnetic configuration,prediction of Hamiltonian matrix,prediction of Jij,skyrmions,SO(3),SOC,spin spiral,spin-dependent,SU(2),ternary systems,TRS}, + keywords = {\_tablet,2D material,AFM,AML,DFT,DFT speedup,DFT speedup with ML,Dzyaloshinskii–Moriya interaction,E(3),equivariant,GNN,Hall effect,Heisenberg model,iron,Jij,magnetic Hamiltonian,magnetic supperlattice,magnetism,ML,ML-DFT,ML-ESM,MPNN,non-collinear,OpenMX,prediction from magnetic configuration,prediction of Hamiltonian matrix,prediction of Jij,skyrmions,SO(3),SOC,spin spiral,spin-dependent,SU(2),ternary systems,TRS}, file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_Accelerating the electronic-structure calculation of magnetic systems 
by.pdf;/Users/wasmer/Zotero/storage/RJIQYZHY/2306.html} } +@article{zhongGeneralTensorPrediction2023, + title = {A {{General Tensor Prediction Framework Based}} on {{Graph Neural Networks}}}, + author = {Zhong, Yang and Yu, Hongyu and Gong, Xingao and Xiang, Hongjun}, + date = {2023-07-07}, + journaltitle = {The Journal of Physical Chemistry Letters}, + shortjournal = {J. Phys. Chem. Lett.}, + pages = {6339--6348}, + publisher = {{American Chemical Society}}, + doi = {10.1021/acs.jpclett.3c01200}, + url = {https://doi.org/10.1021/acs.jpclett.3c01200}, + urldate = {2023-07-13}, + abstract = {Graph neural networks (GNNs) have been shown to be extremely flexible and accurate in predicting the physical properties of molecules and crystals. However, traditional invariant GNNs are not compatible with directional properties, which currently limits their usage to the prediction of only invariant scalar properties. To address this issue, here we propose a general framework, i.e., an edge-based tensor prediction graph neural network, in which a tensor is expressed as the linear combination of the local spatial components projected on the edge directions of clusters with varying sizes. This tensor decomposition is rotationally equivariant and exactly satisfies the symmetry of the local structures. The accuracy and universality of our new framework are demonstrated by the successful prediction of various tensor properties from first to third order. 
The framework proposed in this work will enable GNNs to step into the broad field of prediction of directional properties.}, + keywords = {AML,benchmarking,GNN,GPR,magnetic anisotropy,magnetism,ML,MLP,MPNN,prediction of magnetic anisotropy,SA-GPR,SOAP,tensorial target}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_A General Tensor Prediction Framework Based on Graph Neural Networks.pdf;/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_A General Tensor Prediction Framework Based on Graph Neural Networks2.pdf;/Users/wasmer/Zotero/storage/B7FX9ZP8/acs.jpclett.html} +} + +@article{zhongTransferableEquivariantGraph2023, + title = {Transferable Equivariant Graph Neural Networks for the {{Hamiltonians}} of Molecules and Solids}, + author = {Zhong, Yang and Yu, Hongyu and Su, Mao and Gong, Xingao and Xiang, Hongjun}, + date = {2023-10-06}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {9}, + number = {1}, + pages = {1--13}, + publisher = {{Nature Publishing Group}}, + issn = {2057-3960}, + doi = {10.1038/s41524-023-01130-4}, + url = {https://www.nature.com/articles/s41524-023-01130-4}, + urldate = {2023-10-13}, + abstract = {This work presents an E(3) equivariant graph neural network called HamGNN, which can fit the electronic Hamiltonian matrix of molecules and solids by a complete data-driven method. Unlike invariant models that achieve equivariance approximately through data augmentation, HamGNN employs E(3) equivariant convolutions to construct the Hamiltonian matrix, ensuring strict adherence to all equivariant constraints inherent in the physical system. In contrast to previous models with limited transferability, HamGNN demonstrates exceptional accuracy on various datasets, including QM9 molecular datasets, carbon allotropes, silicon allotropes, SiO2 isomers, and BixSey compounds. 
The trained HamGNN models exhibit accurate predictions of electronic structures for large crystals beyond the training set, including the Moiré twisted bilayer MoS2 and silicon supercells with dislocation defects, showcasing remarkable transferability and generalization capabilities. The HamGNN model, trained on small systems, can serve as an efficient alternative to density functional theory (DFT) for accurately computing the electronic structures of large systems.}, + issue = {1}, + langid = {english}, + keywords = {\_tablet,Electronic properties and materials,Electronic structure}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhong et al_2023_Transferable equivariant graph neural networks for the Hamiltonians of.pdf} +} + @online{zhouComprehensiveSurveyPretrained2023, title = {A {{Comprehensive Survey}} on {{Pretrained Foundation Models}}: {{A History}} from {{BERT}} to {{ChatGPT}}}, shorttitle = {A {{Comprehensive Survey}} on {{Pretrained Foundation Models}}}, @@ -12156,6 +16500,41 @@ file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2023_A Comprehensive Survey on Pretrained Foundation Models.pdf;/Users/wasmer/Zotero/storage/CWZ9H6CB/2302.html} } +@article{zhouGraphNeuralNetworks2020, + title = {Graph Neural Networks: {{A}} Review of Methods and Applications}, + shorttitle = {Graph Neural Networks}, + author = {Zhou, Jie and Cui, Ganqu and Hu, Shengding and Zhang, Zhengyan and Yang, Cheng and Liu, Zhiyuan and Wang, Lifeng and Li, Changcheng and Sun, Maosong}, + date = {2020-01-01}, + journaltitle = {AI Open}, + shortjournal = {AI Open}, + volume = {1}, + pages = {57--81}, + issn = {2666-6510}, + doi = {10.1016/j.aiopen.2021.01.001}, + url = {https://www.sciencedirect.com/science/article/pii/S2666651021000012}, + urldate = {2023-11-14}, + abstract = {Lots of learning tasks require dealing with graph data which contains rich relation information among elements. 
Modeling physics systems, learning molecular fingerprints, predicting protein interface, and classifying diseases demand a model to learn from graph inputs. In other domains such as learning from non-structural data like texts and images, reasoning on extracted structures (like the dependency trees of sentences and the scene graphs of images) is an important research topic which also needs graph reasoning models. Graph neural networks (GNNs) are neural models that capture the dependence of graphs via message passing between the nodes of graphs. In recent years, variants of GNNs such as graph convolutional network (GCN), graph attention network (GAT), graph recurrent network (GRN) have demonstrated ground-breaking performances on many deep learning tasks. In this survey, we propose a general design pipeline for GNN models and discuss the variants of each component, systematically categorize the applications, and propose four open problems for future research.}, + keywords = {/unread,General ML,GNN,graph,graph ML,ML,review,review-of-graph-ML}, + file = {/Users/wasmer/Nextcloud/Zotero/Zhou et al_2020_Graph neural networks.pdf;/Users/wasmer/Zotero/storage/YML8J4GK/S2666651021000012.html} +} + +@article{zhouLearningAtomsMaterials2018, + title = {Learning Atoms for Materials Discovery}, + author = {Zhou, Quan and Tang, Peizhe and Liu, Shenxiu and Pan, Jinbo and Yan, Qimin and Zhang, Shou-Cheng}, + date = {2018-07-10}, + journaltitle = {Proceedings of the National Academy of Sciences}, + volume = {115}, + number = {28}, + pages = {E6411-E6417}, + publisher = {{Proceedings of the National Academy of Sciences}}, + doi = {10.1073/pnas.1801181115}, + url = {https://www.pnas.org/doi/10.1073/pnas.1801181115}, + urldate = {2023-07-12}, + abstract = {Exciting advances have been made in artificial intelligence (AI) during recent decades. 
Among them, applications of machine learning (ML) and deep learning techniques brought human-competitive performances in various tasks of fields, including image recognition, speech recognition, and natural language understanding. Even in Go, the ancient game of profound complexity, the AI player has already beat human world champions convincingly with and without learning from the human. In this work, we show that our unsupervised machines (Atom2Vec) can learn the basic properties of atoms by themselves from the extensive database of known compounds and materials. These learned properties are represented in terms of high-dimensional vectors, and clustering of atoms in vector space classifies them into meaningful groups consistent with human knowledge. We use the atom vectors as basic input units for neural networks and other ML models designed and trained to predict materials properties, which demonstrate significant accuracy.}, + keywords = {/unread,AML,Atom2Vec,compositional descriptors,descriptors,embedding,library,materials,ML,unsupervised learning,with-code,Word2Vec}, + file = {/Users/wasmer/Zotero/storage/HN96KIR2/Zhou et al. - 2018 - Learning atoms for materials discovery.pdf} +} + @book{zhuBogoliubovdeGennesMethod2016, title = {Bogoliubov-de {{Gennes Method}} and {{Its Applications}}}, author = {Zhu, Jian-Xin}, -- GitLab