diff --git a/bib/bibliography.bib b/bib/bibliography.bib index 78aa8e738e78660834d75a2a2e4c1ab23a4cc384..a5910888a4f66e04cdbae6f6e84c236f5a0cb3fe 100644 --- a/bib/bibliography.bib +++ b/bib/bibliography.bib @@ -827,6 +827,19 @@ Subject\_term\_id: cheminformatics;computational-models;computational-science}, file = {/Users/wasmer/Nextcloud/Zotero/Bai et al_2022_Machine learning the Hohenberg-Kohn map for molecular excited states.pdf} } +@online{bakerNobelPrizeChemistry2024, + title = {The {{Nobel Prize}} in {{Chemistry}} 2024}, + author = {Baker, David and Hassabis, Demis and Jumper, John M.}, + date = {2024-10-09}, + url = {https://www.nobelprize.org/prizes/chemistry/2024/summary/}, + urldate = {2024-10-09}, + abstract = {The Nobel Prize in Chemistry 2024 was awarded with one half to David Baker “for computational protein design” and the other half jointly to Demis Hassabis and John M. Jumper “for protein structure prediction”}, + langid = {american}, + organization = {NobelPrize.org}, + keywords = {/unread,AI4Science,AlphaFold,AML,biomolecules,DeepMind,for introductions,ML,Nobel prize,Protein structure predictions}, + file = {/Users/wasmer/Nextcloud/Zotero/Baker et al. - 2024 - The Nobel Prize in Chemistry 2024_1.pdf;/Users/wasmer/Nextcloud/Zotero/Baker et al. - 2024 - The Nobel Prize in Chemistry 2024.pdf;/Users/wasmer/Zotero/storage/8V2RH9IV/summary.html} +} + +@online{bakshiLearningQuantumHamiltonians2023, + title = {Learning Quantum {{Hamiltonians}} at Any Temperature in Polynomial Time}, + author = {Bakshi, Ainesh and Liu, Allen and Moitra, Ankur and Tang, Ewin}, @@ -6062,6 +6075,23 @@ Junqi Yin\\ file = {/Users/wasmer/Nextcloud/Zotero/Freitag et al_2021_The real climate and transformative impact of ICT.pdf;/Users/wasmer/Zotero/storage/3IPQYR9I/S2666389921001884.html} } +@article{freyHighthroughputSearchMagnetic2020, + title = {High-throughput search for magnetic and topological order in transition metal oxides}, + author = {Frey, Nathan C. and Horton, Matthew K. 
and Munro, Jason M. and Griffin, Sinéad M. and Persson, Kristin A. and Shenoy, Vivek B.}, + date = {2020-12-09}, + journaltitle = {Science Advances}, + volume = {6}, + number = {50}, + pages = {eabd1076}, + publisher = {American Association for the Advancement of Science}, + doi = {10.1126/sciadv.abd1076}, + url = {https://www.science.org/doi/10.1126/sciadv.abd1076}, + urldate = {2024-10-08}, + abstract = {The discovery of intrinsic magnetic topological order in MnBi2Te4 has invigorated the search for materials with coexisting magnetic and topological phases. These multiorder quantum materials are expected to exhibit new topological phases that can be tuned with magnetic fields, but the search for such materials is stymied by difficulties in predicting magnetic structure and stability. Here, we compute more than 27,000 unique magnetic orderings for more than 3000 transition metal oxides in the Materials Project database to determine their magnetic ground states and estimate their effective exchange parameters and critical temperatures. We perform a high-throughput band topology analysis of centrosymmetric magnetic materials, calculate topological invariants, and identify 18 new candidate ferromagnetic topological semimetals, axion insulators, and antiferromagnetic topological insulators. To accelerate future efforts, machine learning classifiers are trained to predict both magnetic ground states and magnetic topological order without requiring first-principles calculations.}, + keywords = {/unread,AML,HTC,magnetic topological materials,magnetism,materials discovery,ML,oxides,scikit-learn,topological,topological insulator,transition metals}, + file = {/Users/wasmer/Nextcloud/Zotero/Frey et al. 
- 2020 - High-throughput search for magnetic and topological order in transition metal oxides.pdf} +} + +@article{freyMachineLearningEnabledDesign2020, + title = {Machine {{Learning-Enabled Design}} of {{Point Defects}} in {{2D Materials}} for {{Quantum}} and {{Neuromorphic Information Processing}}}, + author = {Frey, Nathan C. and Akinwande, Deji and Jariwala, Deep and Shenoy, Vivek B.}, @@ -8441,6 +8471,36 @@ Subject\_term\_id: condensed-matter-physics;theory-and-computation}, file = {/Users/wasmer/Nextcloud/Zotero/Hoogeboom et al_2022_Equivariant Diffusion for Molecule Generation in 3D.pdf;/Users/wasmer/Zotero/storage/K6KWYTSV/2203.html} } +@article{hopfieldNeuralNetworksPhysical1982, + title = {Neural Networks and Physical Systems with Emergent Collective Computational Abilities.}, + author = {Hopfield, J. J.}, + date = {1982-04}, + journaltitle = {Proceedings of the National Academy of Sciences}, + volume = {79}, + number = {8}, + pages = {2554--2558}, + publisher = {Proceedings of the National Academy of Sciences}, + doi = {10.1073/pnas.79.8.2554}, + url = {https://www.pnas.org/doi/abs/10.1073/pnas.79.8.2554}, + urldate = {2024-10-08}, + abstract = {Computational properties of use to biological organisms or to the construction of computers can emerge as collective properties of systems having a large number of simple equivalent components (or neurons). The physical meaning of content-addressable memory is described by an appropriate phase space flow of the state of a system. A model of such a system is given, based on aspects of neurobiology but readily adapted to integrated circuits. The collective properties of this model produce a content-addressable memory which correctly yields an entire memory from any subpart of sufficient size. The algorithm for the time evolution of the state of the system is based on asynchronous parallel processing. 
Additional emergent collective properties include some capacity for generalization, familiarity recognition, categorization, error correction, and time sequence retention. The collective properties are only weakly sensitive to details of the modeling or the failure of individual devices.}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Hopfield - 1982 - Neural networks and physical systems with emergent collective computational abilities..pdf} +} + +@online{hopfieldNobelPrizePhysics2024, + title = {The {{Nobel Prize}} in {{Physics}} 2024}, + author = {Hopfield, John and Hinton, Geoffrey}, + date = {2024-10-08}, + url = {https://www.nobelprize.org/prizes/physics/2024/summary/}, + urldate = {2024-10-08}, + abstract = {The Nobel Prize in Physics 2024 was awarded to John J. Hopfield and Geoffrey E. Hinton “for foundational discoveries and inventions that enable machine learning with artificial neural networks”}, + langid = {american}, + organization = {NobelPrize.org}, + keywords = {AI4Science,Deep learning,for introductions,General ML,Hopfield network,ML,neural network,Nobel prize,RBM,spin models,statistical physics}, + file = {/Users/wasmer/Nextcloud/Zotero/Hopfield and Hinton - 2024 - The Nobel Prize in Physics 2024_1.pdf;/Users/wasmer/Nextcloud/Zotero/Hopfield and Hinton - 2024 - The Nobel Prize in Physics 2024.pdf;/Users/wasmer/Zotero/storage/7KYN3SJY/summary.html} +} + +@inproceedings{horschEuropeanStandardizationEfforts2023, + title = {European Standardization Efforts from {{FAIR}} toward Explainable-{{AI-ready}} Data Documentation in Materials Modelling}, + booktitle = {2023 3rd {{International Conference}} on {{Applied Artificial Intelligence}} ({{ICAPAI}})}, @@ -8678,6 +8738,24 @@ Subject\_term\_id: computational-methods;research-management}, file = {/Users/wasmer/Nextcloud/Zotero/Huh et al_2024_The Platonic Representation Hypothesis.pdf;/Users/wasmer/Zotero/storage/R7C9FVMV/2405.html} } +@article{hungUniversalEnsembleEmbeddingGraph2024, + title 
= {Universal {{Ensemble-Embedding Graph Neural Network}} for {{Direct Prediction}} of {{Optical Spectra}} from {{Crystal Structures}}}, + author = {Hung, Nguyen Tuan and Okabe, Ryotaro and Chotrattanapituk, Abhijatmedhi and Li, Mingda}, + date = {2024-09-12}, + journaltitle = {Advanced Materials}, + volume = {n/a}, + number = {n/a}, + pages = {2409175}, + issn = {1521-4095}, + doi = {10.1002/adma.202409175}, + url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/adma.202409175}, + urldate = {2024-10-08}, + abstract = {Optical properties in solids, such as refractive index and absorption, hold vast applications ranging from solar panels to sensors, photodetectors, and transparent displays. However, first-principles computation of optical properties from crystal structures is a complex task due to the high convergence criteria and computational cost. Recent progress in machine learning shows promise in predicting material properties, yet predicting optical properties from crystal structures remains challenging due to the lack of efficient atomic embeddings. Here, Graph Neural Network for Optical spectra prediction (GNNOpt) is introduced, an equivariant graph-neural-network architecture featuring universal embedding with automatic optimization. This enables high-quality optical predictions with a dataset of only 944 materials. GNNOpt predicts all optical properties based on the Kramers-Krönig relations, including absorption coefficient, complex dielectric function, complex refractive index, and reflectance. The trained model is applied to screen photovoltaic materials based on spectroscopic limited maximum efficiency and search for quantum materials based on quantum weight. First-principles calculations validate the efficacy of the GNNOpt model, demonstrating excellent agreement in predicting the optical spectra of unseen materials. 
The discovery of new quantum materials with high predicted quantum weight, such as SiOs, which host exotic quasiparticles with multifold nontrivial topology, demonstrates the potential of GNNOpt in predicting optical properties across a broad range of materials and applications.}, + langid = {english}, + keywords = {/unread,AML,energy materials,equivariant,GNN,kramers-krönig relations,ML,photovoltaics,prediction of optical spectra,quantum materials}, + file = {/Users/wasmer/Nextcloud/Zotero/Hung et al. - Universal Ensemble-Embedding Graph Neural Network for Direct Prediction of Optical Spectra from Crys.pdf;/Users/wasmer/Zotero/storage/JVQULL49/adma.html} +} + @online{huOGBLSCLargeScaleChallenge2021, title = {{{OGB-LSC}}: {{A Large-Scale Challenge}} for {{Machine Learning}} on {{Graphs}}}, shorttitle = {{{OGB-LSC}}}, @@ -10505,6 +10583,23 @@ Subject\_term\_id: computational-methods;density-functional-theory;method-develo file = {/Users/wasmer/Nextcloud/Zotero/Kreuzberger et al_2023_Machine Learning Operations (MLOps).pdf;/Users/wasmer/Zotero/storage/AGAJG2J6/10081336.html} } +@article{krizhevskyImageNetClassificationDeep2017, + title = {{{ImageNet}} Classification with Deep Convolutional Neural Networks}, + author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E.}, + date = {2017-05-24}, + journaltitle = {Commun. ACM}, + volume = {60}, + number = {6}, + pages = {84--90}, + issn = {0001-0782}, + doi = {10.1145/3065386}, + url = {https://dl.acm.org/doi/10.1145/3065386}, + urldate = {2024-10-08}, + abstract = {We trained a large, deep convolutional neural network to classify the 1.2 million high-resolution images in the ImageNet LSVRC-2010 contest into the 1000 different classes. On the test data, we achieved top-1 and top-5 error rates of 37.5\% and 17.0\%, respectively, which is considerably better than the previous state-of-the-art. 
The neural network, which has 60 million parameters and 650,000 neurons, consists of five convolutional layers, some of which are followed by max-pooling layers, and three fully connected layers with a final 1000-way softmax. To make training faster, we used non-saturating neurons and a very efficient GPU implementation of the convolution operation. To reduce overfitting in the fully connected layers we employed a recently developed regularization method called "dropout" that proved to be very effective. We also entered a variant of this model in the ILSVRC-2012 competition and achieved a winning top-5 test error rate of 15.3\%, compared to 26.2\% achieved by the second-best entry.}, + keywords = {/unread}, + file = {/Users/wasmer/Nextcloud/Zotero/Krizhevsky et al. - 2017 - ImageNet classification with deep convolutional neural networks.pdf} +} + @book{kronmullerHandbookMagnetismAdvanced2007, title = {Handbook of {{Magnetism}} and {{Advanced Magnetic Materials}}}, editor = {Kronmüller, Helmut and Parkin, Stuart}, @@ -16795,6 +16890,26 @@ Subject\_term\_id: condensed-matter-physics;electronic-structure;materials-scien file = {/Users/wasmer/Nextcloud/Zotero/Shapeev_2017_Accurate representation of formation energies of crystalline alloys with many.pdf;/Users/wasmer/Zotero/storage/EQYE3F3F/S0927025617303610.html} } +@article{sharmaQuantumaccurateMachineLearning2024, + title = {Quantum-Accurate Machine Learning Potentials for Metal-Organic Frameworks Using Temperature Driven Active Learning}, + author = {Sharma, Abhishek and Sanvito, Stefano}, + date = {2024-10-08}, + journaltitle = {npj Computational Materials}, + shortjournal = {npj Comput Mater}, + volume = {10}, + number = {1}, + pages = {1--13}, + publisher = {Nature Publishing Group}, + issn = {2057-3960}, + doi = {10.1038/s41524-024-01427-y}, + url = {https://www.nature.com/articles/s41524-024-01427-y}, + urldate = {2024-10-08}, + abstract = {Understanding structural flexibility of metal-organic frameworks (MOFs) 
via molecular dynamics simulations is crucial to design better MOFs. Density functional theory (DFT) and quantum-chemistry methods provide highly accurate molecular dynamics, but the computational overheads limit their use in long time-dependent simulations. In contrast, classical force fields struggle with the description of coordination bonds. Here we develop a DFT-accurate machine-learning spectral neighbor analysis potentials for two representative MOFs. Their structural and vibrational properties are then studied and tightly compared with available experimental data. Most importantly, we demonstrate an active-learning algorithm, based on mapping the relevant internal coordinates, which drastically reduces the number of training data to be computed at the DFT level. Thus, the workflow presented here appears as an efficient strategy for the study of flexible MOFs with DFT accuracy, but at a fraction of the DFT computational cost.}, + langid = {english}, + keywords = {/unread,AML,ML,MLP,MOF,SNAP,with-code,with-data}, + file = {/Users/wasmer/Nextcloud/Zotero/Sharma and Sanvito - 2024 - Quantum-accurate machine learning potentials for metal-organic frameworks using temperature driven a.pdf} +} + @online{shenRepresentationindependentElectronicCharge2021, title = {A Representation-Independent Electronic Charge Density Database for Crystalline Materials}, author = {Shen, Jimmy-Xuan and Munro, Jason M. and Horton, Matthew K. and Huck, Patrick and Dwaraknath, Shyam and Persson, Kristin A.},