@article {5090, title = {Using child-friendly movie stimuli to study the development of face, place, and object regions from age 3 to 12 years}, journal = {Human Brain Mapping}, year = {2022}, month = {03/2022}, abstract = {

Scanning young children while they watch short, engaging, commercially produced movies has emerged as a promising approach for increasing data retention and quality. Movie stimuli also evoke a richer variety of cognitive processes than traditional experiments, allowing the study of multiple aspects of brain development simultaneously. However, because these stimuli are uncontrolled, it is unclear how effectively distinct profiles of brain activity can be distinguished from the resulting data. Here we develop an approach for identifying multiple distinct subject-specific Regions of Interest (ssROIs) using fMRI data collected during movie-viewing. We focused on the test case of higher-level visual regions selective for faces, scenes, and objects. Adults (N = 13) were scanned while viewing a 5.6-min child-friendly movie, as well as a traditional localizer experiment with blocks of faces, scenes, and objects. We found that just 2.7 min of movie data could identify subject-specific face, scene, and object regions. While successful, movie-defined ssROIs still showed weaker domain selectivity than traditional ssROIs. Having validated our approach in adults, we then used the same methods on movie data collected from 3- to 12-year-old children (N = 122). Movie response timecourses in 3-year-old children{\textquoteright}s face, scene, and object regions were already significantly and specifically predicted by timecourses from the corresponding regions in adults. We also found evidence of continued developmental change, particularly in the face-selective posterior superior temporal sulcus. Taken together, our results reveal both early maturity and functional change in face, scene, and object regions, and more broadly highlight the promise of short, child-friendly movies for developmental cognitive neuroscience.

}, issn = {1065-9471}, doi = {10.1002/hbm.25815}, url = {https://onlinelibrary.wiley.com/doi/10.1002/hbm.25815}, author = {Kamps, Frederik S. and Richardson, Hilary and Ratan Murty, N. Apurva and Kanwisher, Nancy and Saxe, Rebecca} } @article {2571, title = {The occipital place area represents the local elements of scenes}, journal = {NeuroImage}, volume = {132}, year = {2016}, month = {02/2016}, pages = {417 - 424}, abstract = {

Neuroimaging studies have identified three scene-selective regions in human cortex: the parahippocampal place area (PPA), retrosplenial complex (RSC), and occipital place area (OPA). However, precisely what scene information each region represents is not clear, especially for the least studied, more posterior OPA. Here we hypothesized that OPA represents local elements of scenes within two independent, yet complementary scene descriptors: spatial boundary (i.e., the layout of external surfaces) and scene content (e.g., internal objects). If OPA processes the local elements of spatial boundary information, then it should respond to these local elements (e.g., walls) themselves, regardless of their spatial arrangement. Indeed, we found that OPA, but not PPA or RSC, responded similarly to images of intact rooms and these same rooms in which the surfaces were fractured and rearranged, disrupting the spatial boundary. Next, if OPA represents the local elements of scene content information, then it should respond more when more such local elements (e.g., furniture) are present. Indeed, we found that OPA, but not PPA or RSC, responded more to multiple than single pieces of furniture. Taken together, these findings reveal that OPA analyzes local scene elements, in both spatial boundary and scene content representation, while PPA and RSC represent global scene properties.

}, issn = {1053-8119}, doi = {10.1016/j.neuroimage.2016.02.062}, url = {https://www.ncbi.nlm.nih.gov/pubmed/26931815}, author = {Kamps, Frederik S. and Julian, Joshua B. and Kubilius, Jonas and Kanwisher, Nancy and Dilks, Daniel D.} }