@misc{14946,
  author   = {Simon Funke and Sebastian Mitusch},
  title    = {Algorithmic differentiation for mixed {FEniCS}-{Tensorflow} models},
  abstract = {In this talk, we present a recent addition to dolfin-adjoint: support for Tensorflow models. Tensorflow is a numerical computation library that is mostly used for machine learning (ML) and deep neural network research, but that can also be applied in various other domains. With Tensorflow support in dolfin-adjoint, users can now implement mixed PDE-ML models, in which the PDEs are solved with FEniCS and the ML functions are computed with Tensorflow. The high-level differentiation capabilities of dolfin-adjoint automatically compute derivatives of these models, which may be used to optimise (train) model parameters from data. The implementation works as follows. During model execution, an algorithmic differentiation (AD) tool records a computation graph. In this graph, nodes represent high-level mathematical operations, such as a PDE solve using FEniCS or an ML function evaluation using Tensorflow. Edges represent the variables, such as finite element functions or tensors, that flow between operations. Further, we implemented basic transfer functions to map tensors in Tensorflow to FEniCS data structures. From this computation graph, dolfin-adjoint derives the associated adjoint graph, in which each node is replaced by its adjoint counterpart. If a graph node represents a PDE solve, the AD capabilities in FEniCS are used to obtain its adjoint; if a node represents a Tensorflow operation, Tensorflow's internal AD tool is used to obtain the adjoint version of that operation.},
  year     = {2018},
  address  = {Oxford, UK},
}
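
A minimal sketch of the taping-and-adjoint workflow the abstract describes, assuming the standard fenics_adjoint (dolfin-adjoint) API with a Poisson problem and a source term f as the control; the Tensorflow coupling and the transfer functions mentioned in the talk are omitted, since the abstract does not name their interface, and this is an illustrative example rather than the authors' code:

# Sketch: dolfin-adjoint records a tape while a FEniCS model runs, then
# replays the associated adjoint graph to compute a gradient.
from fenics import *
from fenics_adjoint import *

mesh = UnitSquareMesh(16, 16)
V = FunctionSpace(mesh, "CG", 1)

# Control variable, e.g. the field an ML model would predict (assumption for
# illustration; the actual TF-to-FEniCS transfer is not shown here).
f = interpolate(Constant(1.0), V)

u = TrialFunction(V)
v = TestFunction(V)
bc = DirichletBC(V, Constant(0.0), "on_boundary")

# PDE solve: recorded as a single high-level node on the computation graph.
u_sol = Function(V)
solve(inner(grad(u), grad(v)) * dx == f * v * dx, u_sol, bc)

# Scalar objective, also taped; compute_gradient derives and evaluates the
# adjoint graph to obtain dJ/df, which could drive training or optimisation.
J = assemble(0.5 * u_sol**2 * dx)
dJdf = compute_gradient(J, Control(f))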