@inproceedings{17124,
  author    = {Johannes M{\"u}ller and Marius Zeinhofer},
  title     = {Achieving High Accuracy with {PINNs} via Energy Natural Gradient Descent},
  abstract  = {We propose energy natural gradient descent, a natural gradient method with respect to a Hessian-induced Riemannian metric, as an optimization algorithm for physics-informed neural networks (PINNs) and the deep Ritz method. As our main motivation, we show that the update direction in function space resulting from the energy natural gradient corresponds to the Newton direction modulo an orthogonal projection onto the model's tangent space. We demonstrate experimentally that energy natural gradient descent yields highly accurate solutions with errors several orders of magnitude smaller than what is obtained when training PINNs with standard optimizers such as gradient descent, Adam, or BFGS, even when those are allowed significantly more computation time. We show that the approach can be combined with deterministic and stochastic discretizations of the integral terms and with deep networks, allowing for applications in higher-dimensional settings.},
  year      = {2023},
  booktitle = {International Conference on Machine Learning (ICML)},
  publisher = {PMLR},
}