@article{JML-1-373,
  author   = {Yang, Hongkang},
  title    = {A Mathematical Framework for Learning Probability Distributions},
  journal  = {Journal of Machine Learning},
  year     = {2022},
  volume   = {1},
  number   = {4},
  pages    = {373--431},
  issn     = {2790-2048},
  doi      = {10.4208/jml.221202},
  url      = {http://global-sci.org/intro/article_detail/jml/21298.html},
  abstract = {The modeling of probability distributions, specifically generative modeling and density estimation, has become an immensely popular subject in recent years by virtue of its outstanding performance on sophisticated data such as images and texts. Nevertheless, a theoretical understanding of its success is still incomplete. One mystery is the paradox between memorization and generalization: In theory, the model is trained to be exactly the same as the empirical distribution of the finite samples, whereas in practice, the trained model can generate new samples or estimate the likelihood of unseen samples. Likewise, the overwhelming diversity of distribution learning models calls for a unified perspective on this subject. This paper provides a mathematical framework such that all the well-known models can be derived based on simple principles. To demonstrate its efficacy, we present a survey of our results on the approximation error, training error and generalization error of these models, which can all be established based on this framework. In particular, the aforementioned paradox is resolved by proving that these models enjoy implicit regularization during training, so that the generalization error at early-stopping avoids the curse of dimensionality. Furthermore, we provide some new results on landscape analysis and the mode collapse phenomenon.},
}