Commit d46b7b4c authored by Johannes Wasmer

update bibliography

parent 8da2f27a
@@ -3199,6 +3199,17 @@
file = {/Users/wasmer/Nextcloud/Zotero/Degrave et al_2022_Magnetic control of tokamak plasmas through deep reinforcement learning.pdf;/Users/wasmer/Zotero/storage/U6PRS6KM/s41586-021-04301-9.html}
}
 
@book{deisenrothMathematicsMachineLearning2020,
title = {Mathematics for Machine Learning},
author = {Deisenroth, Marc Peter and Faisal, A. Aldo and Ong, Cheng Soon},
date = {2020},
publisher = {{Cambridge University Press}},
location = {{Cambridge ; New York, NY}},
abstract = {The fundamental mathematical tools needed to understand machine learning include linear algebra, analytic geometry, matrix decompositions, vector calculus, optimization, probability, and statistics. These topics are traditionally taught in disparate courses, making it hard for data science or computer science students, or professionals, to efficiently learn the mathematics. This self-contained textbook bridges the gap between mathematical and machine learning texts, introducing the mathematical concepts with a minimum of prerequisites. It uses these concepts to derive four central machine learning methods: linear regression, principal component analysis, Gaussian mixture models, and support vector machines. For students and others with a mathematical background, these derivations provide a starting point to machine learning texts. For those learning the mathematics for the first time, the methods help build intuition and practical experience with applying mathematical concepts.},
isbn = {978-1-108-47004-9 978-1-108-45514-5},
keywords = {/unread,educational,General ML,learning material,linear algebra,mathematics,ML,ML theory,online book,probability theory,statistics,textbook}
}

@article{delrioDeepLearningFramework2023,
title = {A Deep Learning Framework to Emulate Density Functional Theory},
author = {family=Rio, given=Beatriz G., prefix=del, useprefix=true and Phan, Brandon and Ramprasad, Rampi},
@@ -10188,6 +10199,20 @@ Junqi Yin (Oak Ridge National Laboratory)},
file = {/Users/wasmer/Zotero/storage/X264Q7JM/S0008622320312252.html}
}
 
@book{mohriFoundationsMachineLearning2018,
title = {Foundations of Machine Learning},
author = {Mohri, Mehryar and Rostamizadeh, Afshin and Talwalkar, Ameet},
date = {2018},
series = {Adaptive Computation and Machine Learning},
edition = {Second edition},
publisher = {{The MIT Press}},
location = {{Cambridge, Massachusetts}},
url = {https://cs.nyu.edu/~mohri/mlbook/},
isbn = {978-0-262-03940-6},
pagetotal = {486},
keywords = {/unread,educational,General ML,learning material,ML,ML theory,online book,textbook}
}

@article{moldabekovNonempiricalMixingCoefficient2023,
title = {Non-Empirical {{Mixing Coefficient}} for {{Hybrid XC Functionals}} from {{Analysis}} of the {{XC Kernel}}},
author = {Moldabekov, Zhandos A. and Lokamani, Mani and Vorberger, Jan and Cangi, Attila and Dornheim, Tobias},
@@ -10491,7 +10516,7 @@ Junqi Yin (Oak Ridge National Laboratory)},
date = {2022},
publisher = {{MIT Press}},
url = {https://probml.ai},
-keywords = {/unread}
+keywords = {/unread,educational,General ML,learning material,ML,ML theory,online book,textbook}
}
 
@unpublished{musaelianLearningLocalEquivariant2022,
@@ -13939,6 +13964,16 @@ Junqi Yin (Oak Ridge National Laboratory)},
file = {/Users/wasmer/Nextcloud/Zotero/Szlachta et al_2014_Accuracy and transferability of Gaussian approximation potential models for.pdf;/Users/wasmer/Nextcloud/Zotero/Szlachta et al_2014_Accuracy and transferability of Gaussian approximation potential models for2.pdf;/Users/wasmer/Zotero/storage/YFHICPLQ/PhysRevB.90.html}
}
 
@online{tabogaStatlectDigitalTextbook2021,
title = {Statlect, the Digital Textbook | {{Probability}}, Statistics, Matrix Algebra},
author = {Taboga, Marco},
date = {2021},
url = {https://www.statlect.com/},
urldate = {2023-11-21},
keywords = {/unread,educational,learning material,linear algebra,mathematics,online book,probability theory,statistics},
file = {/Users/wasmer/Zotero/storage/IDWYNP9E/www.statlect.com.html}
}

@article{takamotoTeaNetUniversalNeural2022,
title = {{{TeaNet}}: {{Universal}} Neural Network Interatomic Potential Inspired by Iterative Electronic Relaxations},
shorttitle = {{{TeaNet}}},
@@ -15202,7 +15237,7 @@ Ying-Wai Li (Los Alamos National Laboratory)},
urldate = {2023-11-18},
abstract = {In this book, Equivariant and Coordinate Independent Convolutional Networks, we develop a gauge theory of artificial neural networks for processing spatially structured data like images, audio, or videos. The standard neural network architecture for such data are convolutional networks, which are characterized by their position-independent inference. Generalizing whatever they learn over spatial locations, convolutional networks are substantially more data efficient and robust in comparison to non-convolutional models. This characteristic is especially important in domains like medical imaging, where training data is scarce. The independence from spatial locations is formally captured by the networks’ translation group equivariance, i.e. their property to commute with translations of their input signals. We show that the convolutional network design is not only sufficient for translation equivariance but is actually a necessary condition – convolutions can therefore be derived by demanding the model’s equivariance. The first part of this work leverages this insight to define generalized convolutional networks which are equivariant under larger symmetry groups. Such models generalize their inference over additional geometric transformations, for instance, rotations or reflections of patterns in images. We demonstrate empirically that they exhibit a significantly enhanced data efficiency, convergence rate, and final performance in comparison to conventional convolutional networks. Our publicly available implementation found wide use in the research community. In the second part, we extend convolutional networks further to process signals on Riemannian manifolds. Beyond flat Euclidean images, this setting includes, e.g., spherical signals like global weather patterns on the earth’s surface, or signals on general surfaces like artery walls or the cerebral cortex. We show that convolution kernels on manifolds are required to be equivariant under local gauge transformations if the networks’ inference is demanded to be coordinate independent. The resulting coordinate independent networks are proven to be equivariant with respect to the manifolds’ global symmetries (isometries). Our objective is not to propose yet another equivariant network design for a narrow application domain, but to devise a unifying mathematical framework for convolutional networks. The last part of this book demonstrates the generality of our differential geometric formulation of convolutional networks by showing that it is able to explain a vast number of equivariant network architectures from the literature.},
langid = {american},
-keywords = {/unread,CNN,covariant,educational,equivariant,gauge theory,General ML,geometric deep learning,GNN,group theory,invariance,learning material,ML,ML theory,physics-informed ML,review,review-of-GDL,steerable CNN,symmetry},
+keywords = {/unread,CNN,covariant,educational,equivariant,gauge theory,General ML,geometric deep learning,GNN,group theory,invariance,learning material,ML,ML theory,online book,physics-informed ML,review,review-of-GDL,steerable CNN,symmetry,textbook},
file = {/Users/wasmer/Nextcloud/Zotero/Weiler et al_2023_Equivariant and Coordinate Independent Convolutional Networks - A Gauge Field.pdf;/Users/wasmer/Zotero/storage/U2AEW2RU/maurice-weiler.gitlab.io.html}
}
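
The entries added and retagged in this commit all carry keywords such as educational, learning material, online book, and textbook, so learning resources can be pulled out of the bibliography programmatically. Below is a minimal sketch of such a query, assuming the file is saved as bibliography.bib (a hypothetical name) and that the bibtexparser v1 package is installed; it illustrates the keyword scheme and is not part of the commit:

import bibtexparser

# Parse the bibliography; for biblatex entry types like @online and
# @unpublished, the parser's ignore_nonstandard_types flag may need
# to be disabled.
parser = bibtexparser.bparser.BibTexParser(ignore_nonstandard_types=False)
with open("bibliography.bib") as f:
    db = bibtexparser.load(f, parser=parser)

# Print every entry tagged as a textbook or an online book.
for entry in db.entries:
    keywords = {k.strip() for k in entry.get("keywords", "").split(",")}
    if {"textbook", "online book"} & keywords:
        print(entry["ID"], "-", entry.get("title", "untitled"))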
 