@article{1593, title = {How Important is Weight Symmetry in Backpropagation?}, year = {2015}, month = {November}, abstract = {

Gradient backpropagation (BP) requires symmetric feedforward and feedback connections{\textemdash}the same weights must be used for the forward and backward passes. This {\textquotedblleft}weight transport problem{\textquotedblright} [1] is thought to be one of the main reasons for BP{\textquoteright}s biological implausibility. Using 15 different classification datasets, we systematically study to what extent BP really depends on weight symmetry. In a study that turned out to be surprisingly similar in spirit to Lillicrap et al.{\textquoteright}s demonstration [2] but orthogonal in its results, our experiments indicate that: (1) the magnitudes of feedback weights do not matter to performance; (2) the signs of feedback weights do matter{\textemdash}the more concordant the signs between feedforward connections and their corresponding feedback connections, the better; (3) with feedback weights having random magnitudes and 100\% concordant signs, we were able to achieve the same or even better performance than SGD; and (4) some normalizations/stabilizations are indispensable for such asymmetric BP to work, namely Batch Normalization (BN) [3] and/or a {\textquotedblleft}Batch Manhattan{\textquotedblright} (BM) update rule.

}, author = {Qianli Liao and Joel Z. Leibo and Tomaso Poggio} }
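
The two mechanisms the abstract names can be illustrated in a few lines of NumPy. This is a minimal sketch under stated assumptions, not the authors' implementation: the layer sizes, initialization scales, learning rate, and function names (forward, backward, batch_manhattan_step) are all hypothetical, and a real experiment would also include nonlinearities and Batch Normalization as the abstract describes.

import numpy as np

rng = np.random.default_rng(0)

# Layer sizes (illustrative only).
n_in, n_out = 64, 10

# Feedforward weights W; the feedback matrix V shares W's signs but has
# random magnitudes ("sign-concordant feedback", findings (1)-(3)).
W = rng.normal(scale=0.1, size=(n_out, n_in))
V = np.sign(W) * np.abs(rng.normal(scale=0.1, size=W.shape))

def forward(x):
    # Linear layer; the nonlinearity is omitted to keep the sketch short.
    return W @ x

def backward(delta_out, x):
    # Asymmetric BP: the error is propagated with V.T rather than W.T,
    # so exact weight transport is never required.
    delta_in = V.T @ delta_out
    grad_W = np.outer(delta_out, x)  # local gradient w.r.t. W
    return delta_in, grad_W

def batch_manhattan_step(W, grad_W, lr=0.01):
    # "Batch Manhattan" (BM) update, finding (4): keep only the sign of
    # the mini-batch gradient and discard its magnitude.
    return W - lr * np.sign(grad_W)

The point of the sketch is that the backward pass never reads W's magnitudes, only a fixed sign-concordant surrogate V, while the sign-only BM update makes learning robust to the mis-scaled gradients that such asymmetric feedback produces.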