@article{TNNLS2021Victor,
  author   = "V{\'i}ctor Manuel Vargas and Pedro Antonio Guti{\'e}rrez and Javier Barbero-G{\'o}mez and C{\'e}sar Herv{\'a}s-Mart{\'i}nez",
  title    = "{A}ctivation functions for convolutional neural networks: proposals and experimental study",
  journal  = "IEEE Transactions on Neural Networks and Learning Systems",
  year     = "2023",
  volume   = "34",
  number   = "3",
  pages    = "1478--1488",
  month    = "March",
  issn     = "2162-237X",
  doi      = "10.1109/TNNLS.2021.3105444",
  url      = "https://doi.org/10.1109/TNNLS.2021.3105444",
  keywords = "activation functions, convolutional networks, ELU",
  abstract = "Activation functions lie at the core of every neural network model, from shallow to deep convolutional neural networks. Their properties and characteristics shape the output range of each layer and, thus, its capabilities. Modern approaches rely mostly on a single function choice for the whole network, usually ReLU or other similar alternatives. In this work, we propose two new activation functions, analyse their properties, and compare them with 17 different function proposals from the recent literature on six distinct problems with different characteristics. The objective is to shed some light on their comparative performance. The results show that the proposed functions achieved better performance than the most commonly used ones.",
  note     = "JCR(2022): 10.4; Position: 6/111 (Q1D1); Category: COMPUTER SCIENCE, THEORY {\&} METHODS",
}