@article{POYATOS2023110757,
  author   = {Poyatos, Javier and Molina, Daniel and Martínez-Seras, Aitor and {Del Ser}, Javier and Herrera, Francisco},
  title    = {Multiobjective evolutionary pruning of {Deep Neural Networks} with {Transfer Learning} for improving their performance and robustness},
  journal  = {Applied Soft Computing},
  volume   = {147},
  pages    = {110757},
  year     = {2023},
  issn     = {1568-4946},
  doi      = {10.1016/j.asoc.2023.110757},
  url      = {https://www.sciencedirect.com/science/article/pii/S1568494623007755},
  keywords = {Evolutionary Deep Learning, Multi-objective algorithms, Pruning, Out of Distribution detection, Transfer Learning},
  abstract = {Evolutionary Computation algorithms have been used to solve optimization problems in relation with architectural, hyper-parameter or training configuration, forging the field known today as Neural Architecture Search. These algorithms have been combined with other techniques such as the pruning of Neural Networks, which reduces the complexity of the network, and the Transfer Learning, which lets the import of knowledge from another problem related to the one at hand. The usage of several criteria to evaluate the quality of the evolutionary proposals is also a common case, in which the performance and complexity of the network are the most used criteria. This work proposes MO-EvoPruneDeepTL, a multi-objective evolutionary pruning algorithm. MO-EvoPruneDeepTL uses Transfer Learning to adapt the last layers of Deep Neural Networks, by replacing them with sparse layers evolved by a genetic algorithm, which guides the evolution based in the performance, complexity and robustness of the network, being the robustness a great quality indicator for the evolved models. We carry out different experiments with several datasets to assess the benefits of our proposal. Results show that our proposal achieves promising results in all the objectives, and direct relation are presented among them. The experiments also show that the most influential neurons help us explain which parts of the input images are the most relevant for the prediction of the pruned neural network. Lastly, by virtue of the diversity within the Pareto front of pruning patterns produced by the proposal, it is shown that an ensemble of differently pruned models improves the overall performance and robustness of the trained networks.},
}