@inproceedings{b238dbf283284574a61c26dab69d86ac,
title = "Compression of dnns using magnitude pruning and nonlinear information bottleneck training",
abstract = "As Deep Neural Networks (DNNs) have achieved state-of- the-art performance in various scientific fields and applica- tions, the memory and computational complexity of DNNs have increased concurrently. The increased complexity re- quired by DNNs prohibits them from running on platforms with limited computational resources. This has sparked a re- newed interest in parameter pruning. We propose to replace the standard cross-entropy objective – typically used in clas- sification problems – with the Nonlinear Information Bottle- neck (NIB) objective to improve the accuracy of a pruned net- work. We demonstrate, that our proposal outperforms cross- entropy combined with global magnitude pruning for high compression rates on VGG-nets trained on CIFAR10. With approximately 97% of the parameters pruned, we obtain an accuracy of 87.63% and 88.22% for VGG-16 and VGG-19, respectively, where the baseline accuracy is 91.5% for the un- pruned networks. We observe that the majority of biases are pruned completely, and pruning parameters globally outper- forms layer-wise pruning.",
keywords = "deep learning, mutual information, parameter pruning, variational bottleneck",
author = "Nielsen, {Morten {\O}stergaard} and Jan {\O}stergaard and Jesper Jensen and Zheng-Hua Tan",
year = "2021",
month = oct,
doi = "10.1109/MLSP52302.2021.9596128",
language = "English",
isbn = "978-1-6654-1184-4",
series = "IEEE Workshop on Machine Learning for Signal Processing",
pages = "1--6",
booktitle = "2021 IEEE 31st International Workshop on Machine Learning for Signal Processing (MLSP)",
publisher = "IEEE",
address = "United States",
note = "2021 IEEE 31st International Workshop on Machine Learning for Signal Processing (MLSP) ; Conference date: 25-10-2021 Through 28-10-2021",
}