@Article{JML-1-1, author = {Zhang, Yaoyu and Li, Yuqing and Zhang, Zhongwang and Luo, Tao and Xu, Zhi-Qin John}, title = {Embedding Principle: A Hierarchical Structure of Loss Landscape of Deep Neural Networks}, journal = {Journal of Machine Learning}, year = {2022}, volume = {1}, number = {1}, pages = {60--113}, abstract = {

We prove a general Embedding Principle of the loss landscape of deep neural networks (NNs) that unravels a hierarchical structure of the loss landscape: the loss landscape of an NN contains all critical points of all narrower NNs. This result is obtained by constructing a class of critical embeddings that map any critical point of a narrower NN to a critical point of the target NN with the same output function. By discovering a wide class of general compatible critical embeddings, we provide a rough estimate of the dimension of the critical submanifolds embedded from critical points of narrower NNs. We further prove an irreversibility property of any critical embedding: the number of negative/zero/positive eigenvalues of the Hessian matrix at a critical point may increase but never decreases as an NN becomes wider through the embedding. Using a special realization of general compatible critical embeddings, we prove a stringent necessary condition for a critical point to be “truly bad”, i.e., one that never becomes a strict-saddle point through any critical embedding. This result implies that strict-saddle points are commonplace in wide NNs, which may be an important reason underlying the easy optimization of wide NNs widely observed in practice.

}, issn = {2790-2048}, doi = {10.4208/jml.220108}, url = {https://global-sci.com/article/87607/embedding-principle-a-hierarchical-structure-of-loss-landscape-of-deep-neural-networks} }