@article{32372,
  author     = {Richardson, Nicholas and Ho, Lam Si Tung and Tran, Giang},
  title      = {Adaptive Group {Lasso} Neural Network Models for Functions of Few Variables and Time-Dependent Data},
  journal    = {Sampling Theory, Signal Processing, and Data Analysis},
  year       = {2023},
  eprint     = {2108.10825},
  eprinttype = {arXiv},
  url        = {https://arxiv.org/abs/2108.10825},
  abstract   = {In this paper, we propose an adaptive group Lasso deep neural network for high-dimensional function approximation where input data are generated from a dynamical system and the target function depends on few active variables or few linear combinations of variables. We approximate the target function by a deep neural network and enforce an adaptive group Lasso constraint to the weights of a suitable hidden layer in order to represent the constraint on the target function. We utilize the proximal algorithm to optimize the penalized loss function. Using the non-negative property of the Bregman distance, we prove that the proposed optimization procedure achieves loss decay. Our empirical studies show that the proposed method outperforms recent state-of-the-art methods including the sparse dictionary matrix method, neural networks with or without group Lasso penalty.},
}