@article {4255, title = {Divergence in the functional organization of human and macaque auditory cortex revealed by fMRI responses to harmonic tones}, journal = {Nature Neuroscience}, year = {2019}, month = {06/10/2019}, abstract = {

We report a difference between humans and macaque monkeys in the functional organization of cortical regions implicated in pitch perception. Humans but not macaques showed regions with a strong preference for harmonic sounds compared to noise, measured with both synthetic tones and macaque vocalizations. In contrast, frequency-selective tonotopic maps were similar between the two species. This species difference may be driven by the unique demands of speech and music perception in humans.

}, issn = {1097-6256}, doi = {10.1038/s41593-019-0410-7}, url = {https://www.nature.com/articles/s41593-019-0410-7}, author = {Sam V. Norman-Haignere and Nancy Kanwisher and Josh H. McDermott and B. R. Conway} } @article {2572, title = {Color-Biased Regions of the Ventral Visual Pathway Lie between Face- and Place-Selective Regions in Humans, as in Macaques}, journal = {Journal of Neuroscience}, volume = {36}, year = {2016}, month = {02/2016}, pages = {1682-1697}, abstract = {

The existence of color-processing regions in the human ventral visual pathway (VVP) has long been known from patient and imaging studies, but their location in the cortex relative to other regions, their selectivity for color compared with other properties (shape and object category), and their relationship to color-processing regions found in nonhuman primates remain unclear. We addressed these questions by scanning 13 subjects with fMRI while they viewed two versions of movie clips (colored, achromatic) of five different object classes (faces, scenes, bodies, objects, scrambled objects). We identified regions in each subject that were selective for color, faces, places, and object shape, and measured responses within these regions to the 10 conditions in independently acquired data. We report two key findings. First, the three previously reported color-biased regions (located within a band running posterior{\textendash}anterior along the VVP, present in most of our subjects) were sandwiched between face-selective cortex and place-selective cortex, forming parallel bands of face, color, and place selectivity that tracked the fusiform gyrus/collateral sulcus. Second, the posterior color-biased regions showed little or no selectivity for object shape or for particular stimulus categories and showed no interaction of color preference with stimulus category, suggesting that they code color independently of shape or stimulus category; moreover, the shape-biased lateral occipital region showed no significant color bias. These observations mirror results in macaque inferior temporal cortex (Lafer-Sousa and Conway, 2013), and taken together, these results suggest a homology in which the entire tripartite face/color/place system of primates migrated onto the ventral surface in humans over the course of evolution.

SIGNIFICANCE STATEMENT Here we report that color-biased cortex is sandwiched between face-selective and place-selective cortex on the bottom surface of the brain in humans. This face/color/place organization mirrors that seen on the lateral surface of the temporal lobe in macaques, suggesting that the entire tripartite system is homologous between species. This result validates the use of macaques as a model for human vision, making possible more powerful investigations into the connectivity, precise neural codes, and development of this part of the brain. In addition, we find substantial segregation of color from shape selectivity in posterior regions, as observed in macaques, indicating a considerable dissociation of the processing of shape and color in both species.

}, issn = {0270-6474}, doi = {10.1523/JNEUROSCI.3164-15.2016}, url = {http://www.jneurosci.org/cgi/doi/10.1523/JNEUROSCI.3164-15.2016}, author = {R. Lafer-Sousa and B. R. Conway and Nancy Kanwisher} } @article {2765, title = {Mechanisms of color perception and cognition covered by $\#$thedress}, volume = {16}, year = {2016}, month = {8/2016}, pages = {746}, journal = {Journal of Vision}, abstract = {

Color is notoriously ambiguous{\textendash}many color illusions exist{\textendash}but until now it has been thought that all people with normal color vision experience color illusions the same way. How does the visual system resolve color ambiguity? Here, we present work that addresses this question by quantifying people's perception of a particularly ambiguous image, ``the dress'' photograph. The colors of the individual pixels in the photograph when viewed in isolation are light-blue or brown, but popular accounts suggest the dress appears either white/gold or blue/black. We tested more than 1400 people, both online and under controlled laboratory conditions. Subjects first completed the sentence: ``this is a ___ and ___ dress.'' Then they performed a color-matching experiment that did not depend on language. Surprisingly, the results uncovered three groups of subjects: white/gold observers, blue/black observers, and blue/brown observers. Our findings show that the brain resolves the ambiguity in the dress into one of three stable states; a minority of people ({\textasciitilde}11\%) switched which colors they saw. It is clear that what we see depends on both retinal stimulation and internal knowledge about the world. Cases of multi-stability such as the dress provide a rare opportunity to investigate this interplay. In particular, we go on to demonstrate that the dress photograph can be used as a tool to discover that skin reflectance is a particularly important implicit cue that the brain uses to estimate the color of the light source and resolve color ambiguity, shedding light on the role of high-level cues in color perception.

}, doi = {10.1167/16.12.746}, author = {B. R. Conway and R. Lafer-Sousa and Katherine Hermann} } @article {2789, title = {Mechanisms of color perception and cognition covered by $\#$thedress}, volume = {16}, year = {2016}, month = {9/2016}, pages = {746}, type = {Conference Talk}, abstract = {

Color is notoriously ambiguous{\textendash}many color illusions exist{\textendash}but until now it has been thought that all people with normal color vision experience color illusions the same way. How does the visual system resolve color ambiguity? Here, we present work that addresses this question by quantifying people's perception of a particularly ambiguous image, ``the dress'' photograph. The colors of the individual pixels in the photograph when viewed in isolation are light-blue or brown, but popular accounts suggest the dress appears either white/gold or blue/black. We tested more than 1400 people, both online and under controlled laboratory conditions. Subjects first completed the sentence: ``this is a ___ and ___ dress.'' Then they performed a color-matching experiment that did not depend on language. Surprisingly, the results uncovered three groups of subjects: white/gold observers, blue/black observers, and blue/brown observers. Our findings show that the brain resolves the ambiguity in the dress into one of three stable states; a minority of people ({\textasciitilde}11\%) switched which colors they saw. It is clear that what we see depends on both retinal stimulation and internal knowledge about the world. Cases of multi-stability such as the dress provide a rare opportunity to investigate this interplay. In particular, we go on to demonstrate that the dress photograph can be used as a tool to discover that skin reflectance is a particularly important implicit cue that the brain uses to estimate the color of the light source and resolve color ambiguity, shedding light on the role of high-level cues in color perception.

}, author = {B. R. Conway and R. Lafer-Sousa and Katherine Hermann} }