@article{5268,
  title = {Preliminary evidence for selective cortical responses to music in one-month-old infants},
  journal = {Developmental Science},
  year = {2023},
  month = {03/2023},
  abstract = {Prior studies have observed selective neural responses in the adult human auditory cortex to music and speech that cannot be explained by the differing lower-level acoustic properties of these stimuli. Does infant cortex exhibit similarly selective responses to music and speech shortly after birth? To answer this question, we attempted to collect functional magnetic resonance imaging (fMRI) data from 45 sleeping infants (2.0 to 11.9 weeks old) while they listened to monophonic instrumental lullabies and to infant-directed speech produced by a mother. To match acoustic variation between the music and speech sounds, we (1) recorded music from instruments with a spectral range similar to that of female infant-directed speech, (2) used a novel excitation-matching algorithm to match the cochleagrams of the music and speech stimuli, and (3) synthesized ``model-matched'' stimuli that were matched in spectrotemporal modulation statistics to (yet perceptually distinct from) music or speech. Of the 36 infants from whom we collected usable data, 19 showed significant activation to sounds overall compared with scanner noise. In these infants, we observed a set of voxels in non-primary auditory cortex (NPAC), but not in Heschl's gyrus, that responded significantly more to music than to each of the other three stimulus types (but not significantly more strongly than to the background scanner noise). In contrast, our planned analyses did not reveal voxels in NPAC that responded more to speech than to model-matched speech, although other unplanned analyses did. These preliminary findings suggest that music selectivity arises within the first month of life.},
  keywords = {auditory cortex, fMRI, infants, music, speech},
  issn = {1363-755X},
  doi = {10.1111/desc.13387},
  url = {https://onlinelibrary.wiley.com/doi/10.1111/desc.13387},
  author = {Kosakowski, Heather L. and Norman-Haignere, Samuel and Mynick, Anna and Takahashi, Atsushi and Saxe, Rebecca and Kanwisher, Nancy}
}

@article{3573,
  title = {A task-optimized neural network replicates human auditory behavior, predicts brain responses, and reveals a cortical processing hierarchy},
  journal = {Neuron},
  volume = {98},
  year = {2018},
  month = {04/2018},
  abstract = {A core goal of auditory neuroscience is to build quantitative models that predict cortical responses to natural sounds. Reasoning that a complete model of auditory cortex must solve ecologically relevant tasks, we optimized hierarchical neural networks for speech and music recognition. The best-performing network contained separate music and speech pathways following early shared processing, potentially replicating human cortical organization. The network performed both tasks as well as humans and exhibited human-like errors despite not being optimized to do so, suggesting common constraints on network and human performance. The network predicted fMRI voxel responses substantially better than traditional spectrotemporal filter models throughout auditory cortex. It also provided a quantitative signature of cortical representational hierarchy: primary and non-primary responses were best predicted by intermediate and late network layers, respectively. The results suggest that task optimization provides a powerful set of tools for modeling sensory systems.},
  keywords = {auditory cortex, convolutional neural network, deep learning, deep neural network, encoding models, fMRI, hierarchy, human auditory cortex, natural sounds, word recognition},
  doi = {10.1016/j.neuron.2018.03.044},
  url = {https://www.sciencedirect.com/science/article/pii/S0896627318302502},
  author = {Kell, Alexander J. E. and Yamins, Daniel L. K. and Shook, Erica N. and Norman-Haignere, Sam V. and McDermott, Josh H.}
}

@article{2639,
  title = {A Causal Relationship Between Face-Patch Activity and Face-Detection Behavior},
  journal = {eLife},
  year = {2017},
  month = {04/2017},
  abstract = {The primate brain contains distinct areas densely populated by face-selective neurons. One of these, face-patch ML, contains neurons selective for contrast relationships between face parts. Such contrast relationships can serve as powerful heuristics for face detection. However, it is unknown whether neurons with such selectivity actually support face-detection behavior. Here, we devised a naturalistic face-detection task and combined it with fMRI-guided pharmacological inactivation of ML to test whether ML is of critical importance for real-world face detection. We found that inactivation of ML impairs face detection. The effect was anatomically specific, as inactivation of areas outside ML did not affect face detection, and it was categorically specific, as inactivation of ML impaired face detection while sparing body and object detection. These results establish that ML function is crucial for detection of faces in natural scenes, performing a critical first step on which other face processing operations can build.},
  keywords = {face patch, fMRI, inactivation, Neuroscience},
  doi = {10.7554/eLife.18558.001},
  url = {https://elifesciences.org/articles/18558},
  author = {Sadagopan, Srivatsun and Zarco, Wilbert and Freiwald, W. A.}
}