@inproceedings{6a340228fbef4746a39f8547b322c4df,
  title     = {{ClosNets}: Batchless {DNN} Training with On-Chip a Priori Sparse Neural Topologies},
  abstract  = {The deployment of deep neural network (DNN) models is generally hindered by their training time. DNN training throughput is commonly limited by the fully-connected layers. This is due to their large size and low data reuse. Large batch sizes are often used to mitigate some of the effects. Increasing batch size can however hurt model accuracy, creating a tradeoff between accuracy and efficiency. We tackle the problem of training DNNs in on-chip memory, allowing us to train models without the use of batching. Pruning and quantizing dense layers can greatly reduce network size, allowing models to fit on the chip, but can only be applied after training. We propose a fully-connected but sparse layer that reduces the memory requirements of DNNs without sacrificing accuracy. We replace a dense matrix with a sparse matrix product with a predetermined topology. This allows us to: (1) train significantly smaller networks without a loss in accuracy, and (2) store weights without having to store connection indices. We therefore achieve significant training speedups due to the fast access to on-chip weights, smaller network size, and a reduced amount of computation per epoch.},
  keywords  = {acceleration, hardware, neural network, sparsity},
  author    = {Isakov, Mihailo and Ehret, Alan and Kinsy, Michel},
  note      = {Publisher Copyright: {\textcopyright} 2018 IEEE.; 28th International Conference on Field-Programmable Logic and Applications, FPL 2018 ; Conference date: 26-08-2018 Through 30-08-2018},
  year      = {2018},
  month     = nov,
  day       = {9},
  doi       = {10.1109/FPL.2018.00017},
  language  = {English (US)},
  series    = {Proceedings - 2018 International Conference on Field-Programmable Logic and Applications, {FPL} 2018},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {55--59},
  booktitle = {Proceedings - 2018 International Conference on Field-Programmable Logic and Applications, {FPL} 2018},
}