@inproceedings{8eda975cf3334965b8a548ec6d07467d,
  title     = {Projectional Learning Laws for Differential Neural Networks Based on Double-Averaged Sub-Gradient Descent Technique},
  abstract  = {A new method to design learning laws for neural networks with continuous dynamics is proposed in this study. The learning method is based on the so-called double-averaged descendant technique (DASGDT), which is a variant of the gradient-descendant method. The learning law implements a double averaged algorithm which filters the effect of uncertainties of the states, which are continuously measurable. The learning law overcomes the classical assumption on the strict convexity of the functional with respect to the weights. The photocatalytic ozonation process of a single contaminant is estimated using the learning law design proposed in this study.},
  keywords  = {Differential neural networks, Double-averaged subgradient, Optimization, Ozonation processes, Projection},
  author    = {Chairez, Isaac and Poznyak, Alexander and Nazin, Alexander and Poznyak, Tatyana},
  editor    = {Lu, Huchuan and Tang, Huajin and Wang, Zhanshan},
  booktitle = {Advances in Neural Networks -- ISNN 2019 - 16th International Symposium on Neural Networks, ISNN 2019, Proceedings},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  volume    = {11554},
  publisher = {Springer},
  address   = {Cham},
  year      = {2019},
  pages     = {28--38},
  doi       = {10.1007/978-3-030-22796-8_4},
  isbn      = {9783030227951},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 16th International Symposium on Neural Networks, ISNN 2019 ; Conference date: 10-07-2019 Through 12-07-2019},
}