diff --git a/.buildinfo b/.buildinfo
index dfc8a79db..888e5c3fa 100644
--- a/.buildinfo
+++ b/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: 13b17c5c373045c1725623acad9b96b1
+config: 0e5232a893a37e47ccfdc6e2362e71ce
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/_downloads/0171ca85d45eca4cef1870c2e499de10/plot_make_gif.ipynb b/_downloads/0171ca85d45eca4cef1870c2e499de10/plot_make_gif.ipynb
index 99d239e2e..dffa0d196 100644
--- a/_downloads/0171ca85d45eca4cef1870c2e499de10/plot_make_gif.ipynb
+++ b/_downloads/0171ca85d45eca4cef1870c2e499de10/plot_make_gif.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -125,7 +114,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/03727e1a4b42addea2598e45140498aa/static_with_custom_template.ipynb b/_downloads/03727e1a4b42addea2598e45140498aa/static_with_custom_template.ipynb
index 88cc5ca81..280219022 100644
--- a/_downloads/03727e1a4b42addea2598e45140498aa/static_with_custom_template.ipynb
+++ b/_downloads/03727e1a4b42addea2598e45140498aa/static_with_custom_template.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip b/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip
index 4a86a80ba..0c756d864 100644
Binary files a/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip and b/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip differ
diff --git a/_downloads/09288dd002fd14fba94fc085d758d914/plot_roi_voxel_index_volume.ipynb b/_downloads/09288dd002fd14fba94fc085d758d914/plot_roi_voxel_index_volume.ipynb
index 02d74e73f..091daf5b5 100644
--- a/_downloads/09288dd002fd14fba94fc085d758d914/plot_roi_voxel_index_volume.ipynb
+++ b/_downloads/09288dd002fd14fba94fc085d758d914/plot_roi_voxel_index_volume.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/0f8be1e4c7dbdc2b5c70b92872c6b4bf/multiple_datasets.ipynb b/_downloads/0f8be1e4c7dbdc2b5c70b92872c6b4bf/multiple_datasets.ipynb
index 7bd3b0136..dc5ce6dc0 100644
--- a/_downloads/0f8be1e4c7dbdc2b5c70b92872c6b4bf/multiple_datasets.ipynb
+++ b/_downloads/0f8be1e4c7dbdc2b5c70b92872c6b4bf/multiple_datasets.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/123c014fb1951d73e7b1f19570a43c66/plot_get_roi_vertices.ipynb b/_downloads/123c014fb1951d73e7b1f19570a43c66/plot_get_roi_vertices.ipynb
index 7ceab5003..c5635fb44 100644
--- a/_downloads/123c014fb1951d73e7b1f19570a43c66/plot_get_roi_vertices.ipynb
+++ b/_downloads/123c014fb1951d73e7b1f19570a43c66/plot_get_roi_vertices.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/1bc319bfa2fe17d7880c31c36c39b670/plot_thickness_nanmean.ipynb b/_downloads/1bc319bfa2fe17d7880c31c36c39b670/plot_thickness_nanmean.ipynb
index e2f0f021a..53082745f 100644
--- a/_downloads/1bc319bfa2fe17d7880c31c36c39b670/plot_thickness_nanmean.ipynb
+++ b/_downloads/1bc319bfa2fe17d7880c31c36c39b670/plot_thickness_nanmean.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/2adc80653ddc362ff5e8f800f300132f/subject_to_mni.py b/_downloads/2adc80653ddc362ff5e8f800f300132f/subject_to_mni.py
index c35f02a7a..73c93b735 100644
--- a/_downloads/2adc80653ddc362ff5e8f800f300132f/subject_to_mni.py
+++ b/_downloads/2adc80653ddc362ff5e8f800f300132f/subject_to_mni.py
@@ -32,7 +32,7 @@
mni_data = mni.transform_to_mni(data, s1_to_mni)
# mni_data is a nibabel Nifti1Image
-mni_data_vol = mni_data.get_data() # the actual array, shape=(182,218,182)
+mni_data_vol = mni_data.get_fdata() # the actual array, shape=(182,218,182)
# That was the manual method. pycortex can also cache these transforms for you
# if you get them using the pycortex database
diff --git a/_downloads/2b386e6cf21fd3a3a1c01882f5073cd7/mni_to_subject.py b/_downloads/2b386e6cf21fd3a3a1c01882f5073cd7/mni_to_subject.py
index 10fd20133..1650920ba 100644
--- a/_downloads/2b386e6cf21fd3a3a1c01882f5073cd7/mni_to_subject.py
+++ b/_downloads/2b386e6cf21fd3a3a1c01882f5073cd7/mni_to_subject.py
@@ -37,7 +37,7 @@
data.data, s1_to_mni)
# subject_data is a nibabel Nifti1Image
-subject_data_vol = mni_data.get_data() # the actual array, shape=(100,100,31)
+subject_data_vol = subject_data.get_fdata() # the actual array, shape=(100,100,31)
# That was the manual method. pycortex can also cache these transforms for you
# if you get them using the pycortex database
diff --git a/_downloads/2c1d4ccc800c14e88c119609426fa416/plot_geodesic_path.py b/_downloads/2c1d4ccc800c14e88c119609426fa416/plot_geodesic_path.py
index 2bea6ff50..e70c60775 100644
--- a/_downloads/2c1d4ccc800c14e88c119609426fa416/plot_geodesic_path.py
+++ b/_downloads/2c1d4ccc800c14e88c119609426fa416/plot_geodesic_path.py
@@ -24,6 +24,8 @@
surfs = [cortex.polyutils.Surface(*d)
for d in cortex.db.get_surf(subject, "fiducial")]
numl = surfs[0].pts.shape[0]
+numr = surfs[1].pts.shape[0]
+num_vertices = numl + numr
# Now we need to pick the start and end points of the line we will draw
pt_a = 100
@@ -33,12 +35,12 @@
path = surfs[0].geodesic_path(pt_a, pt_b)
# In order to plot this on the cortical surface, we need an array that is the
-# same size as the number of vertices in the left hemisphere
-path_data = np.zeros(numl)
+# same size as the number of vertices
+path_data = np.zeros(num_vertices) * np.nan
for v in path:
path_data[v] = 1
# And now plot these distances onto the cortical surface
-path_verts = cortex.Vertex(path_data, subject, cmap="Blues_r")
-cortex.quickshow(path_verts, with_colorbar=False)
+path_verts = cortex.Vertex(path_data, subject, cmap="Reds", vmin=0, vmax=1)
+cortex.quickshow(path_verts, with_colorbar=False, with_curvature=True)
plt.show()
diff --git a/_downloads/344a9c20fa2e5b85144a3143c290594a/static.ipynb b/_downloads/344a9c20fa2e5b85144a3143c290594a/static.ipynb
index 783b1a889..bd3c91e49 100644
--- a/_downloads/344a9c20fa2e5b85144a3143c290594a/static.ipynb
+++ b/_downloads/344a9c20fa2e5b85144a3143c290594a/static.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/34bc427b5d07c6d8727ed358794c8cc9/plot_voxel_distance_from_surface.ipynb b/_downloads/34bc427b5d07c6d8727ed358794c8cc9/plot_voxel_distance_from_surface.ipynb
index b5cc89c1d..bf3b4dd06 100644
--- a/_downloads/34bc427b5d07c6d8727ed358794c8cc9/plot_voxel_distance_from_surface.ipynb
+++ b/_downloads/34bc427b5d07c6d8727ed358794c8cc9/plot_voxel_distance_from_surface.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/387ca9e74b721456c024802f5fb1210c/plot_subsurfaces.ipynb b/_downloads/387ca9e74b721456c024802f5fb1210c/plot_subsurfaces.ipynb
index 362e2214f..4201e184b 100644
--- a/_downloads/387ca9e74b721456c024802f5fb1210c/plot_subsurfaces.ipynb
+++ b/_downloads/387ca9e74b721456c024802f5fb1210c/plot_subsurfaces.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -143,7 +132,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/38bd34ef15da8508933ae951daa9dfc6/plot_dropout.ipynb b/_downloads/38bd34ef15da8508933ae951daa9dfc6/plot_dropout.ipynb
index beadbe64f..32d688b3e 100644
--- a/_downloads/38bd34ef15da8508933ae951daa9dfc6/plot_dropout.ipynb
+++ b/_downloads/38bd34ef15da8508933ae951daa9dfc6/plot_dropout.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/3a293f5fef99cd1b3cbd8e7036b48449/plot_roi_voxel_mask.ipynb b/_downloads/3a293f5fef99cd1b3cbd8e7036b48449/plot_roi_voxel_mask.ipynb
index 20d52bfa3..0de064e2d 100644
--- a/_downloads/3a293f5fef99cd1b3cbd8e7036b48449/plot_roi_voxel_mask.ipynb
+++ b/_downloads/3a293f5fef99cd1b3cbd8e7036b48449/plot_roi_voxel_mask.ipynb
@@ -1,21 +1,10 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "\n# Get ROI Voxel Mask\n\nGet proportion of each voxel that exists within a named ROI (this \nconstitutes a probability map for the ROI, with values ranging from\n0-1). Plot this probablistic roi mask onto a flatmap. \n\nIn order for this to work, the specified ROI must exist in the\noverlays.svg file in the pycortex filestore for this subject.\n"
+ "\n# Get ROI Voxel Mask\n\nGet proportion of each voxel that exists within a named ROI (this \nconstitutes a probability map for the ROI, with values ranging from\n0-1). Plot this probabilistic roi mask onto a flatmap. \n\nIn order for this to work, the specified ROI must exist in the\noverlays.svg file in the pycortex filestore for this subject.\n"
]
},
{
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import cortex\nimport matplotlib.pyplot as plt\n\nsubject = \"S1\"\nxfm = \"fullhead\"\nroi = \"EBA\"\n\n# Get the map of which voxels are inside of our ROI\nroi_masks = cortex.utils.get_roi_masks(subject, xfm, \n roi_list=[roi],\n gm_sampler='cortical-conservative', # Select only voxels mostly within cortex\n split_lr=False, # No separate left/right ROIs\n threshold=None, # Leave roi mask values as probabilites / fractions\n return_dict=True\n )\n\n# Plot the mask for one ROI onto a flatmap\nroi_data = cortex.Volume(roi_masks[roi], subject, xfm, \n vmin=0, # This is a probability mask, so only\n vmax=1, # so scale btw zero and one\n cmap=\"inferno\", # For pretty\n )\n\ncortex.quickflat.make_figure(roi_data,\n thick=1, # select a single depth (btw white matter & pia)\n sampler='nearest', # no interpolation\n with_curvature=True,\n with_colorbar=True,\n )\n\nplt.show()"
+ "import cortex\nimport matplotlib.pyplot as plt\n\nsubject = \"S1\"\nxfm = \"fullhead\"\nroi = \"EBA\"\n\n# Get the map of which voxels are inside of our ROI\nroi_masks = cortex.utils.get_roi_masks(subject, xfm, \n roi_list=[roi],\n gm_sampler='cortical-conservative', # Select only voxels mostly within cortex\n split_lr=False, # No separate left/right ROIs\n threshold=None, # Leave roi mask values as probabilities / fractions\n return_dict=True\n )\n\n# Plot the mask for one ROI onto a flatmap\nroi_data = cortex.Volume(roi_masks[roi], subject, xfm, \n vmin=0, # This is a probability mask, so only\n vmax=1, # so scale btw zero and one\n cmap=\"inferno\", # For pretty\n )\n\ncortex.quickflat.make_figure(roi_data,\n thick=1, # select a single depth (btw white matter & pia)\n sampler='nearest', # no interpolation\n with_curvature=True,\n with_colorbar=True,\n )\n\nplt.show()"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/3dca224e23400dbb33007465a7b359a6/plot_connected_vertices.ipynb b/_downloads/3dca224e23400dbb33007465a7b359a6/plot_connected_vertices.ipynb
index 14b5f117b..3f936201d 100644
--- a/_downloads/3dca224e23400dbb33007465a7b359a6/plot_connected_vertices.ipynb
+++ b/_downloads/3dca224e23400dbb33007465a7b359a6/plot_connected_vertices.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/42acd7dd246f3d3b2522140c0edb86ef/retinotopy_webgl.ipynb b/_downloads/42acd7dd246f3d3b2522140c0edb86ef/retinotopy_webgl.ipynb
index 850be8134..0f081b769 100644
--- a/_downloads/42acd7dd246f3d3b2522140c0edb86ef/retinotopy_webgl.ipynb
+++ b/_downloads/42acd7dd246f3d3b2522140c0edb86ef/retinotopy_webgl.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import cortex\ntry: # python 2\n from urllib import urlretrieve\nexcept ImportError: # python 3\n from urllib.request import urlretrieve\n\n\n# Download and load in retinotopy data\n_ = urlretrieve(\"http://gallantlab.org/pycortex/S1_retinotopy.hdf\",\n \"S1_retinotopy.hdf\")\nret_data = cortex.load(\"S1_retinotopy.hdf\")\n\n# Open the webviewer\ncortex.webshow(ret_data)"
+ "import cortex\nfrom urllib.request import urlretrieve\n\n\n# Download and load in retinotopy data\n_ = urlretrieve(\n \"https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf\", \n \"S1_retinotopy.hdf\"\n)\nret_data = cortex.load(\"S1_retinotopy.hdf\")\n\n# Open the webviewer\ncortex.webshow(ret_data)"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/52e9abd80b9e9d8a38724eb135ea1ab6/multi_panels_plots.ipynb b/_downloads/52e9abd80b9e9d8a38724eb135ea1ab6/multi_panels_plots.ipynb
index 1b2e138fe..62aa4c878 100644
--- a/_downloads/52e9abd80b9e9d8a38724eb135ea1ab6/multi_panels_plots.ipynb
+++ b/_downloads/52e9abd80b9e9d8a38724eb135ea1ab6/multi_panels_plots.ipynb
@@ -1,21 +1,10 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "\n# Multi-panels figures\n\nThe function `cortex.export.plot_panels` plots a number of 3d views of a given\nvolume, in the same matplotlib figure. It does that by saving a temporary image\nfor each view, and then aggregating them in the same figure.\n\nThe function needs to be run on a system with a display, since it will launch\na webgl viewer. The best way to get the expected results is to keep the webgl\nviewer visible during the process.\n\nThe selection of views and the aggregation is controled by a list of \"panels\".\nExamples of panels can be imported with:\n\n from cortex.export import params_flatmap_lateral_medial\n from cortex.export import params_occipital_triple_view\n"
+ "\n# Multi-panels figures\n\nThe function `cortex.export.plot_panels` plots a number of 3d views of a given\nvolume, in the same matplotlib figure. It does that by saving a temporary image\nfor each view, and then aggregating them in the same figure.\n\nThe function needs to be run on a system with a display, since it will launch\na webgl viewer. The best way to get the expected results is to keep the webgl\nviewer visible during the process.\n\nThe selection of views and the aggregation is controlled by a list of \"panels\".\nExamples of panels can be imported with:\n\n from cortex.export import params_flatmap_lateral_medial\n from cortex.export import params_occipital_triple_view\n"
]
},
{
@@ -118,7 +107,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/55ff50ca9850387e3d30490b06ab18eb/multi_panels_plots.py b/_downloads/55ff50ca9850387e3d30490b06ab18eb/multi_panels_plots.py
index 037de9f7c..87a544b90 100644
--- a/_downloads/55ff50ca9850387e3d30490b06ab18eb/multi_panels_plots.py
+++ b/_downloads/55ff50ca9850387e3d30490b06ab18eb/multi_panels_plots.py
@@ -11,7 +11,7 @@
a webgl viewer. The best way to get the expected results is to keep the webgl
viewer visible during the process.
-The selection of views and the aggregation is controled by a list of "panels".
+The selection of views and the aggregation is controlled by a list of "panels".
Examples of panels can be imported with:
from cortex.export import params_flatmap_lateral_medial
diff --git a/_downloads/56b9122102ee96a8240f7eef88bfd884/plot_roi_voxel_mask.py b/_downloads/56b9122102ee96a8240f7eef88bfd884/plot_roi_voxel_mask.py
index 32cf50904..b0747167b 100644
--- a/_downloads/56b9122102ee96a8240f7eef88bfd884/plot_roi_voxel_mask.py
+++ b/_downloads/56b9122102ee96a8240f7eef88bfd884/plot_roi_voxel_mask.py
@@ -5,7 +5,7 @@
Get proportion of each voxel that exists within a named ROI (this
constitutes a probability map for the ROI, with values ranging from
-0-1). Plot this probablistic roi mask onto a flatmap.
+0-1). Plot this probabilistic roi mask onto a flatmap.
In order for this to work, the specified ROI must exist in the
overlays.svg file in the pycortex filestore for this subject.
@@ -23,7 +23,7 @@
roi_list=[roi],
gm_sampler='cortical-conservative', # Select only voxels mostly within cortex
split_lr=False, # No separate left/right ROIs
- threshold=None, # Leave roi mask values as probabilites / fractions
+ threshold=None, # Leave roi mask values as probabilities / fractions
return_dict=True
)
diff --git a/_downloads/598490201234cf5d8e4a3f5a8dc8c372/plot_retinotopy_flatmap.ipynb b/_downloads/598490201234cf5d8e4a3f5a8dc8c372/plot_retinotopy_flatmap.ipynb
index 519c5d89f..ee5ff2d3d 100644
--- a/_downloads/598490201234cf5d8e4a3f5a8dc8c372/plot_retinotopy_flatmap.ipynb
+++ b/_downloads/598490201234cf5d8e4a3f5a8dc8c372/plot_retinotopy_flatmap.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import six\nimport cortex\nimport matplotlib.pyplot as plt\nif six.PY2:\n from urllib import urlretrieve\nelif six.PY3:\n from urllib.request import urlretrieve\n\n\n# Download the dataset and load it\n_ = urlretrieve(\"http://gallantlab.org/pycortex/S1_retinotopy.hdf\",\n \"S1_retinotopy.hdf\")\nret_data = cortex.load(\"S1_retinotopy.hdf\")\n\n# The retinotopy data has to be divided into left and right hemispheres\nleft_data = ret_data.angle_left\ncortex.quickshow(left_data, with_curvature=True, \n\t\t\t\t\t\t\tcurvature_contrast=0.5, \n\t\t\t\t\t\t\tcurvature_brightness=0.5, \n\t\t\t\t\t\t\tcurvature_threshold=True)\nplt.show()\n\nright_data = ret_data.angle_right\ncortex.quickshow(right_data, with_curvature=True, \n\t\t\t\t\t\t\t curvature_contrast=0.5, \n\t\t\t\t\t\t\t curvature_brightness=0.5, \n\t\t\t\t\t\t\t curvature_threshold=True)\nplt.show()"
+ "import cortex\nimport matplotlib.pyplot as plt\nfrom urllib.request import urlretrieve\n\n\n# Download the dataset and load it\n_ = urlretrieve(\n \"https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf\", \n \"S1_retinotopy.hdf\"\n)\nret_data = cortex.load(\"S1_retinotopy.hdf\")\n\n# The retinotopy data has to be divided into left and right hemispheres\nleft_data = ret_data.angle_left\ncortex.quickshow(left_data, with_curvature=True, \n\t\t\t\t\t\t\tcurvature_contrast=0.5, \n\t\t\t\t\t\t\tcurvature_brightness=0.5, \n\t\t\t\t\t\t\tcurvature_threshold=True)\nplt.show()\n\nright_data = ret_data.angle_right\ncortex.quickshow(right_data, with_curvature=True, \n\t\t\t\t\t\t\t curvature_contrast=0.5, \n\t\t\t\t\t\t\t curvature_brightness=0.5, \n\t\t\t\t\t\t\t curvature_threshold=True)\nplt.show()"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/5bd5d1eeadaf8c455dfa68ed54b8beaf/plot_make_figure.ipynb b/_downloads/5bd5d1eeadaf8c455dfa68ed54b8beaf/plot_make_figure.ipynb
index ef2270e44..490bd83a6 100644
--- a/_downloads/5bd5d1eeadaf8c455dfa68ed54b8beaf/plot_make_figure.ipynb
+++ b/_downloads/5bd5d1eeadaf8c455dfa68ed54b8beaf/plot_make_figure.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/5df5411f55d86f3318a24a2deffe16fe/plot_retinotopy_flatmap.py b/_downloads/5df5411f55d86f3318a24a2deffe16fe/plot_retinotopy_flatmap.py
index 66145685f..332b642e7 100644
--- a/_downloads/5df5411f55d86f3318a24a2deffe16fe/plot_retinotopy_flatmap.py
+++ b/_downloads/5df5411f55d86f3318a24a2deffe16fe/plot_retinotopy_flatmap.py
@@ -9,24 +9,22 @@
command that is included.
-.. _dataset: http://gallantlab.org/pycortex/S1_retinotopy.hdf
+.. _dataset: https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf
S1 is the example subject that comes with pycortex, but if you want to plot
data onto a different subject, you will need to have them in your filestore,
and you will also need a flatmap for them.
"""
-import six
import cortex
import matplotlib.pyplot as plt
-if six.PY2:
- from urllib import urlretrieve
-elif six.PY3:
- from urllib.request import urlretrieve
+from urllib.request import urlretrieve
# Download the dataset and load it
-_ = urlretrieve("http://gallantlab.org/pycortex/S1_retinotopy.hdf",
- "S1_retinotopy.hdf")
+_ = urlretrieve(
+ "https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf",
+ "S1_retinotopy.hdf"
+)
ret_data = cortex.load("S1_retinotopy.hdf")
# The retinotopy data has to be divided into left and right hemispheres
diff --git a/_downloads/5ec582772cb6c68c66aedc35da2aec38/show_config.ipynb b/_downloads/5ec582772cb6c68c66aedc35da2aec38/show_config.ipynb
index 6c8e30a76..ace4a5462 100644
--- a/_downloads/5ec582772cb6c68c66aedc35da2aec38/show_config.ipynb
+++ b/_downloads/5ec582772cb6c68c66aedc35da2aec38/show_config.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -118,7 +107,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/5eeaabd38fb75e278bcc70a86663c196/subject_to_mni.ipynb b/_downloads/5eeaabd38fb75e278bcc70a86663c196/subject_to_mni.ipynb
index 8dc3ba81f..d94c4c1a7 100644
--- a/_downloads/5eeaabd38fb75e278bcc70a86663c196/subject_to_mni.ipynb
+++ b/_downloads/5eeaabd38fb75e278bcc70a86663c196/subject_to_mni.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import cortex\n\n# First let's do this \"manually\", using cortex.mni\nfrom cortex import mni\n\nimport numpy as np\nnp.random.seed(1234)\n\n# This transform is gonna be from one specific functional space for a subject\n# which is defined by the transform (xfm)\ns1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead')\n# s1_to_mni is a 4x4 array describing the transformation in homogeneous corods\n\n# Transform data from subject to MNI space\n# first we will create a dataset to transform\ndata = cortex.Volume.random('S1', 'fullhead')\n\n# then transform it!\nmni_data = mni.transform_to_mni(data, s1_to_mni)\n# mni_data is a nibabel Nifti1Image\n\nmni_data_vol = mni_data.get_data() # the actual array, shape=(182,218,182)\n\n# That was the manual method. pycortex can also cache these transforms for you\n# if you get them using the pycortex database\ns1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead')\n# this is the same as s1_to_mni, but will return instantly on subsequent calls"
+ "import cortex\n\n# First let's do this \"manually\", using cortex.mni\nfrom cortex import mni\n\nimport numpy as np\nnp.random.seed(1234)\n\n# This transform is gonna be from one specific functional space for a subject\n# which is defined by the transform (xfm)\ns1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead')\n# s1_to_mni is a 4x4 array describing the transformation in homogeneous corods\n\n# Transform data from subject to MNI space\n# first we will create a dataset to transform\ndata = cortex.Volume.random('S1', 'fullhead')\n\n# then transform it!\nmni_data = mni.transform_to_mni(data, s1_to_mni)\n# mni_data is a nibabel Nifti1Image\n\nmni_data_vol = mni_data.get_fdata() # the actual array, shape=(182,218,182)\n\n# That was the manual method. pycortex can also cache these transforms for you\n# if you get them using the pycortex database\ns1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead')\n# this is the same as s1_to_mni, but will return instantly on subsequent calls"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/5f330fb3660509634926cd0332d7cbc6/plot_vertex.ipynb b/_downloads/5f330fb3660509634926cd0332d7cbc6/plot_vertex.ipynb
index d3b932661..32817d081 100644
--- a/_downloads/5f330fb3660509634926cd0332d7cbc6/plot_vertex.ipynb
+++ b/_downloads/5f330fb3660509634926cd0332d7cbc6/plot_vertex.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/6346ec4b05719b3c67f1d74a7c94ee9d/plot_rois.ipynb b/_downloads/6346ec4b05719b3c67f1d74a7c94ee9d/plot_rois.ipynb
index 3b16fd994..bc5823014 100644
--- a/_downloads/6346ec4b05719b3c67f1d74a7c94ee9d/plot_rois.ipynb
+++ b/_downloads/6346ec4b05719b3c67f1d74a7c94ee9d/plot_rois.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/638db29a2498956b03e341d19f7c71df/plot_interpolate_data.ipynb b/_downloads/638db29a2498956b03e341d19f7c71df/plot_interpolate_data.ipynb
index 78a9403fd..ebe594139 100644
--- a/_downloads/638db29a2498956b03e341d19f7c71df/plot_interpolate_data.ipynb
+++ b/_downloads/638db29a2498956b03e341d19f7c71df/plot_interpolate_data.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/6c38c4cf54bcaedfa877a93db30d0119/plot_volume.ipynb b/_downloads/6c38c4cf54bcaedfa877a93db30d0119/plot_volume.ipynb
index 510ebcdb4..61a749afe 100644
--- a/_downloads/6c38c4cf54bcaedfa877a93db30d0119/plot_volume.ipynb
+++ b/_downloads/6c38c4cf54bcaedfa877a93db30d0119/plot_volume.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip b/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip
index 014a5c405..f481ab9a6 100644
Binary files a/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip and b/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip differ
diff --git a/_downloads/712cc7a3913a50341baae9f6b000d797/plot_mosaic.ipynb b/_downloads/712cc7a3913a50341baae9f6b000d797/plot_mosaic.ipynb
index 0fccb125d..fe0b197ad 100644
--- a/_downloads/712cc7a3913a50341baae9f6b000d797/plot_mosaic.ipynb
+++ b/_downloads/712cc7a3913a50341baae9f6b000d797/plot_mosaic.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "# sphinx_gallery_thumbnail_number = 3\n\nimport cortex\nimport matplotlib.pyplot as plt\n\n# load reference functional image for test purposes\nvolume_arr = cortex.db.get_xfm('S1', 'fullhead').reference.get_data().T\n# volume_arr is a (31,100,100) ndarray\n\n\n# with no args mosaic slices this volume in the first dimension\nplt.figure()\ncortex.mosaic(volume_arr)\n\n# slices along a different dimension can be plotted using the dim param\n# here coronal\nplt.figure()\ncortex.mosaic(volume_arr, dim=1)\n\n# kwargs are passed through to imshow\nplt.figure()\ncortex.mosaic(volume_arr, cmap=plt.cm.gray, vmin=0, vmax=1500)\n\n# mosaic always returns the mosaic image along with info about its shape\n# here show=False so we don't generate another plot\nmosaic_arr, (nwide, ntall) = cortex.mosaic(volume_arr, show=False)\n# mosaic_arr is 607 x 607, with nwide = 6, ntall = 6"
+ "# sphinx_gallery_thumbnail_number = 3\n\nimport cortex\nimport matplotlib.pyplot as plt\n\n# load reference functional image for test purposes\nvolume_arr = cortex.db.get_xfm('S1', 'fullhead').reference.get_fdata().T\n# volume_arr is a (31,100,100) ndarray\n\n\n# with no args mosaic slices this volume in the first dimension\nplt.figure()\ncortex.mosaic(volume_arr)\n\n# slices along a different dimension can be plotted using the dim param\n# here coronal\nplt.figure()\ncortex.mosaic(volume_arr, dim=1)\n\n# kwargs are passed through to imshow\nplt.figure()\ncortex.mosaic(volume_arr, cmap=plt.cm.gray, vmin=0, vmax=1500)\n\n# mosaic always returns the mosaic image along with info about its shape\n# here show=False so we don't generate another plot\nmosaic_arr, (nwide, ntall) = cortex.mosaic(volume_arr, show=False)\n# mosaic_arr is 607 x 607, with nwide = 6, ntall = 6"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/71eedfa1e15eeae1d50e837a5d00378d/plot_tissots_indicatrix.ipynb b/_downloads/71eedfa1e15eeae1d50e837a5d00378d/plot_tissots_indicatrix.ipynb
index ecff2a48f..2a9fdf8e4 100644
--- a/_downloads/71eedfa1e15eeae1d50e837a5d00378d/plot_tissots_indicatrix.ipynb
+++ b/_downloads/71eedfa1e15eeae1d50e837a5d00378d/plot_tissots_indicatrix.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/754fde0603a864be35c06d16e10d60fc/plot_vertex2D.py b/_downloads/754fde0603a864be35c06d16e10d60fc/plot_vertex2D.py
index f5f75ca99..5eb602bab 100644
--- a/_downloads/754fde0603a864be35c06d16e10d60fc/plot_vertex2D.py
+++ b/_downloads/754fde0603a864be35c06d16e10d60fc/plot_vertex2D.py
@@ -9,7 +9,7 @@
The cortex.Vertex2D object is instantiated with two numpy arrays of the same
size as the total number of vertices in that subject's flatmap. Each pixel is
-colored according to both vlaues given for the nearest vertex in the flatmap.
+colored according to both values given for the nearest vertex in the flatmap.
Instead of random test data, you can replace these with any arrays that are
the length of the all the vertices in the subject.
diff --git a/_downloads/774806b543a93da25908653d426c15c1/plot_cutouts.ipynb b/_downloads/774806b543a93da25908653d426c15c1/plot_cutouts.ipynb
index 0a760c45c..0b588c145 100644
--- a/_downloads/774806b543a93da25908653d426c15c1/plot_cutouts.ipynb
+++ b/_downloads/774806b543a93da25908653d426c15c1/plot_cutouts.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/7e89eee46d99514db8efb119976e80f8/upsample_to_fsaverage.ipynb b/_downloads/7e89eee46d99514db8efb119976e80f8/upsample_to_fsaverage.ipynb
new file mode 100644
index 000000000..c232a5237
--- /dev/null
+++ b/_downloads/7e89eee46d99514db8efb119976e80f8/upsample_to_fsaverage.ipynb
@@ -0,0 +1,43 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n# Upsample data from a lower resolution fsaverage template to fsaverage for visualization\n\nThis example shows how data in a lower resolution fsaverage template \n(e.g., fsaverage5 or fsaverage6) can be upsampled to the high resolution fsaverage \ntemplate for visualization.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "import matplotlib.pyplot as plt\nimport numpy as np\n\nimport cortex\n\nsubject = \"fsaverage\"\n\n# First we check if the fsaverage template is already in the pycortex filestore. If not,\n# we download the template from the web and add it to the filestore.\nif subject not in cortex.db.subjects:\n cortex.download_subject(subject)\n\n# Next we create some data on fsaverage5. Each hemisphere has 10242 vertices.\nn_vertices_fsaverage5 = 10242\ndata_fs5 = np.arange(1, n_vertices_fsaverage5 + 1)\n# We concatenate the data to itself to create a vector of length 20484, corresponding to\n# the two hemispheres together.\ndata_fs5 = np.concatenate((data_fs5, data_fs5))\n# Finally, we upsample the data to fsaverage.\ndata_fs7 = cortex.freesurfer.upsample_to_fsaverage(data_fs5, \"fsaverage5\")\n\n# Now that the data is in the fsaverage template, we can visualize it in PyCortex as any\n# other vertex dataset.\nvtx = cortex.Vertex(data_fs7, subject, vmin=0, vmax=n_vertices_fsaverage5, cmap=\"turbo\")\ncortex.quickshow(vtx, with_curvature=False, with_colorbar=False)\nplt.show()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.19"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/_downloads/813e10fd805fb08e209798753ce45729/plot_volume_to_vertex.ipynb b/_downloads/813e10fd805fb08e209798753ce45729/plot_volume_to_vertex.ipynb
index 7b862fdeb..50c06113f 100644
--- a/_downloads/813e10fd805fb08e209798753ce45729/plot_volume_to_vertex.ipynb
+++ b/_downloads/813e10fd805fb08e209798753ce45729/plot_volume_to_vertex.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/85a1ab66519fd897a9f08ef615f4ac6b/plot_sulci.ipynb b/_downloads/85a1ab66519fd897a9f08ef615f4ac6b/plot_sulci.ipynb
index c427e69f8..89e2bdb42 100644
--- a/_downloads/85a1ab66519fd897a9f08ef615f4ac6b/plot_sulci.ipynb
+++ b/_downloads/85a1ab66519fd897a9f08ef615f4ac6b/plot_sulci.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/88045a5ca44795160381b3e40115ed85/plot_vertexRGB.ipynb b/_downloads/88045a5ca44795160381b3e40115ed85/plot_vertexRGB.ipynb
index 15959159f..14a9de97f 100644
--- a/_downloads/88045a5ca44795160381b3e40115ed85/plot_vertexRGB.ipynb
+++ b/_downloads/88045a5ca44795160381b3e40115ed85/plot_vertexRGB.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/8cf11f78d8c4186f5276b30e9ad94586/plot_vertex2D.ipynb b/_downloads/8cf11f78d8c4186f5276b30e9ad94586/plot_vertex2D.ipynb
index 9676ad756..24c0652cd 100644
--- a/_downloads/8cf11f78d8c4186f5276b30e9ad94586/plot_vertex2D.ipynb
+++ b/_downloads/8cf11f78d8c4186f5276b30e9ad94586/plot_vertex2D.ipynb
@@ -1,21 +1,10 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
"source": [
- "\n# Plot 2D Vertex Data\n\nThis plots example 2D vertex data onto an example subject, S1, onto a flatmap\nusing quickflat. In order for this to run, you have to have a flatmap for this\nsubject in the pycortex filestore.\n\nThe cortex.Vertex2D object is instantiated with two numpy arrays of the same\nsize as the total number of vertices in that subject's flatmap. Each pixel is\ncolored according to both vlaues given for the nearest vertex in the flatmap.\n\nInstead of random test data, you can replace these with any arrays that are\nthe length of the all the vertices in the subject.\n"
+ "\n# Plot 2D Vertex Data\n\nThis plots example 2D vertex data onto an example subject, S1, onto a flatmap\nusing quickflat. In order for this to run, you have to have a flatmap for this\nsubject in the pycortex filestore.\n\nThe cortex.Vertex2D object is instantiated with two numpy arrays of the same\nsize as the total number of vertices in that subject's flatmap. Each pixel is\ncolored according to both values given for the nearest vertex in the flatmap.\n\nInstead of random test data, you can replace these with any arrays that are\nthe length of the all the vertices in the subject.\n"
]
},
{
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/93df78f3116af947a5f503a4a33c9ddd/plot_make_svg.ipynb b/_downloads/93df78f3116af947a5f503a4a33c9ddd/plot_make_svg.ipynb
index c540a6ab5..3729bb7db 100644
--- a/_downloads/93df78f3116af947a5f503a4a33c9ddd/plot_make_svg.ipynb
+++ b/_downloads/93df78f3116af947a5f503a4a33c9ddd/plot_make_svg.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/967bde64d496491cb07a443c9ede7fc9/plot_advanced_compositing.ipynb b/_downloads/967bde64d496491cb07a443c9ede7fc9/plot_advanced_compositing.ipynb
index 21006a193..ef0c9efd6 100644
--- a/_downloads/967bde64d496491cb07a443c9ede7fc9/plot_advanced_compositing.ipynb
+++ b/_downloads/967bde64d496491cb07a443c9ede7fc9/plot_advanced_compositing.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/a075cfaa8cd8b107253dda362b33178e/plot_flatmap_distortion.ipynb b/_downloads/a075cfaa8cd8b107253dda362b33178e/plot_flatmap_distortion.ipynb
index afb5e4cec..c18536e36 100644
--- a/_downloads/a075cfaa8cd8b107253dda362b33178e/plot_flatmap_distortion.ipynb
+++ b/_downloads/a075cfaa8cd8b107253dda362b33178e/plot_flatmap_distortion.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/adbb113238e9e6eed6db37c575b87085/upsample_to_fsaverage.py b/_downloads/adbb113238e9e6eed6db37c575b87085/upsample_to_fsaverage.py
new file mode 100644
index 000000000..36593725e
--- /dev/null
+++ b/_downloads/adbb113238e9e6eed6db37c575b87085/upsample_to_fsaverage.py
@@ -0,0 +1,36 @@
+"""
+========================================================================================
+Upsample data from a lower resolution fsaverage template to fsaverage for visualization
+========================================================================================
+
+This example shows how data in a lower resolution fsaverage template
+(e.g., fsaverage5 or fsaverage6) can be upsampled to the high resolution fsaverage
+template for visualization.
+"""
+
+import matplotlib.pyplot as plt
+import numpy as np
+
+import cortex
+
+subject = "fsaverage"
+
+# First we check if the fsaverage template is already in the pycortex filestore. If not,
+# we download the template from the web and add it to the filestore.
+if subject not in cortex.db.subjects:
+ cortex.download_subject(subject)
+
+# Next we create some data on fsaverage5. Each hemisphere has 10242 vertices.
+n_vertices_fsaverage5 = 10242
+data_fs5 = np.arange(1, n_vertices_fsaverage5 + 1)
+# We concatenate the data to itself to create a vector of length 20484, corresponding to
+# the two hemispheres together.
+data_fs5 = np.concatenate((data_fs5, data_fs5))
+# Finally, we upsample the data to fsaverage.
+data_fs7 = cortex.freesurfer.upsample_to_fsaverage(data_fs5, "fsaverage5")
+
+# Now that the data is in the fsaverage template, we can visualize it in PyCortex as any
+# other vertex dataset.
+vtx = cortex.Vertex(data_fs7, subject, vmin=0, vmax=n_vertices_fsaverage5, cmap="turbo")
+cortex.quickshow(vtx, with_curvature=False, with_colorbar=False)
+plt.show()
diff --git a/_downloads/b2aa18c33412ed6283b23512eb23c3b9/retinotopy_webgl.py b/_downloads/b2aa18c33412ed6283b23512eb23c3b9/retinotopy_webgl.py
index 6cbe32fa6..167a07c5f 100644
--- a/_downloads/b2aa18c33412ed6283b23512eb23c3b9/retinotopy_webgl.py
+++ b/_downloads/b2aa18c33412ed6283b23512eb23c3b9/retinotopy_webgl.py
@@ -8,7 +8,7 @@
dataset_, but that can also be done automatically through the `urllib`
command that is included.
-.. _dataset: http://gallantlab.org/pycortex/S1_retinotopy.hdf
+.. _dataset: https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf
S1 is the example subject that comes with pycortex, but if you want to plot
data onto a different subject, you will need to have them in your filestore.
@@ -21,15 +21,14 @@
"""
import cortex
-try: # python 2
- from urllib import urlretrieve
-except ImportError: # python 3
- from urllib.request import urlretrieve
+from urllib.request import urlretrieve
# Download and load in retinotopy data
-_ = urlretrieve("http://gallantlab.org/pycortex/S1_retinotopy.hdf",
- "S1_retinotopy.hdf")
+_ = urlretrieve(
+ "https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf",
+ "S1_retinotopy.hdf"
+)
ret_data = cortex.load("S1_retinotopy.hdf")
# Open the webviewer
diff --git a/_downloads/bcb26d7471e165be66ecc1ea4a79a747/import_fmriprep.ipynb b/_downloads/bcb26d7471e165be66ecc1ea4a79a747/import_fmriprep.ipynb
index 43cdc1819..d6d00c547 100644
--- a/_downloads/bcb26d7471e165be66ecc1ea4a79a747/import_fmriprep.ipynb
+++ b/_downloads/bcb26d7471e165be66ecc1ea4a79a747/import_fmriprep.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/c47834bb21df7cc049ba6afc8a9f9184/plot_dataset_arithmetic.ipynb b/_downloads/c47834bb21df7cc049ba6afc8a9f9184/plot_dataset_arithmetic.ipynb
index 8164e92e2..864d9df1c 100644
--- a/_downloads/c47834bb21df7cc049ba6afc8a9f9184/plot_dataset_arithmetic.ipynb
+++ b/_downloads/c47834bb21df7cc049ba6afc8a9f9184/plot_dataset_arithmetic.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/c4a1569322d832cb845e73fb64382cd3/plot_volume2D.ipynb b/_downloads/c4a1569322d832cb845e73fb64382cd3/plot_volume2D.ipynb
index e8c76002a..6f72544b2 100644
--- a/_downloads/c4a1569322d832cb845e73fb64382cd3/plot_volume2D.ipynb
+++ b/_downloads/c4a1569322d832cb845e73fb64382cd3/plot_volume2D.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/c74b122a2f122b3931ba518c8dce3b97/mni_to_subject.ipynb b/_downloads/c74b122a2f122b3931ba518c8dce3b97/mni_to_subject.ipynb
index 4a8443b5e..024a7a01b 100644
--- a/_downloads/c74b122a2f122b3931ba518c8dce3b97/mni_to_subject.ipynb
+++ b/_downloads/c74b122a2f122b3931ba518c8dce3b97/mni_to_subject.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import cortex\n\n# First let's do this \"manually\", using cortex.mni\nfrom cortex import mni\n\nimport numpy as np\nnp.random.seed(1234)\n\n\n# This transform is gonna be from one specific functional space for a subject\n# which is defined by the transform (xfm)\ns1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead')\n# s1_to_mni is a 4x4 array describing the transformation in homogeneous corods\n\n# Transform data from MNI to subject space\n# first we will create a dataset to transform\n# this uses the implicitly created \"identity\" transform, which is used for data\n# in the native anatomical space (i.e. same dims as the base anatomical image,\n# and in the same space as the surface)\ndata = cortex.Volume.random('MNI', 'identity')\n\n# then transform it into the space defined by the 'fullhead' transform for 'S1'\nsubject_data = mni.transform_mni_to_subject('S1', 'fullhead', \n data.data, s1_to_mni)\n# subject_data is a nibabel Nifti1Image\n\nsubject_data_vol = mni_data.get_data() # the actual array, shape=(100,100,31)\n\n# That was the manual method. pycortex can also cache these transforms for you\n# if you get them using the pycortex database\ns1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead')\n# this is the same as s1_to_mni, but will return instantly on subsequent calls"
+ "import cortex\n\n# First let's do this \"manually\", using cortex.mni\nfrom cortex import mni\n\nimport numpy as np\nnp.random.seed(1234)\n\n\n# This transform is gonna be from one specific functional space for a subject\n# which is defined by the transform (xfm)\ns1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead')\n# s1_to_mni is a 4x4 array describing the transformation in homogeneous corods\n\n# Transform data from MNI to subject space\n# first we will create a dataset to transform\n# this uses the implicitly created \"identity\" transform, which is used for data\n# in the native anatomical space (i.e. same dims as the base anatomical image,\n# and in the same space as the surface)\ndata = cortex.Volume.random('MNI', 'identity')\n\n# then transform it into the space defined by the 'fullhead' transform for 'S1'\nsubject_data = mni.transform_mni_to_subject('S1', 'fullhead', \n data.data, s1_to_mni)\n# subject_data is a nibabel Nifti1Image\n\nsubject_data_vol = mni_data.get_fdata() # the actual array, shape=(100,100,31)\n\n# That was the manual method. pycortex can also cache these transforms for you\n# if you get them using the pycortex database\ns1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead')\n# this is the same as s1_to_mni, but will return instantly on subsequent calls"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/c83c63a97afe485326cd1fdab357fff8/plot_mosaic.py b/_downloads/c83c63a97afe485326cd1fdab357fff8/plot_mosaic.py
index 23c1e4ac2..2e8d363c6 100644
--- a/_downloads/c83c63a97afe485326cd1fdab357fff8/plot_mosaic.py
+++ b/_downloads/c83c63a97afe485326cd1fdab357fff8/plot_mosaic.py
@@ -14,7 +14,7 @@
import matplotlib.pyplot as plt
# load reference functional image for test purposes
-volume_arr = cortex.db.get_xfm('S1', 'fullhead').reference.get_data().T
+volume_arr = cortex.db.get_xfm('S1', 'fullhead').reference.get_fdata().T
# volume_arr is a (31,100,100) ndarray
diff --git a/_downloads/cc502342f659cf0c536754003ffae538/dynamic_with_custom_template.ipynb b/_downloads/cc502342f659cf0c536754003ffae538/dynamic_with_custom_template.ipynb
index b1d29b54e..034cbd9ab 100644
--- a/_downloads/cc502342f659cf0c536754003ffae538/dynamic_with_custom_template.ipynb
+++ b/_downloads/cc502342f659cf0c536754003ffae538/dynamic_with_custom_template.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/d9b4e151ce75aace67a450abc15967b6/plot_make_png.ipynb b/_downloads/d9b4e151ce75aace67a450abc15967b6/plot_make_png.ipynb
index 3b6b75134..f035b87d4 100644
--- a/_downloads/d9b4e151ce75aace67a450abc15967b6/plot_make_png.ipynb
+++ b/_downloads/d9b4e151ce75aace67a450abc15967b6/plot_make_png.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/dafaff1c5c813c5150e44a7314911857/plot_volumeRGB.ipynb b/_downloads/dafaff1c5c813c5150e44a7314911857/plot_volumeRGB.ipynb
index 86f856fb9..981179517 100644
--- a/_downloads/dafaff1c5c813c5150e44a7314911857/plot_volumeRGB.ipynb
+++ b/_downloads/dafaff1c5c813c5150e44a7314911857/plot_volumeRGB.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/e01e45ad3d31a19a9f9f8dff3847cf51/plot_zoom_to_roi.ipynb b/_downloads/e01e45ad3d31a19a9f9f8dff3847cf51/plot_zoom_to_roi.ipynb
index e61efbe07..a287cb7b7 100644
--- a/_downloads/e01e45ad3d31a19a9f9f8dff3847cf51/plot_zoom_to_roi.ipynb
+++ b/_downloads/e01e45ad3d31a19a9f9f8dff3847cf51/plot_zoom_to_roi.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/efc62904d6eba92714248fce62fca073/single_dataset.ipynb b/_downloads/efc62904d6eba92714248fce62fca073/single_dataset.ipynb
index 97df1aafe..28303a57b 100644
--- a/_downloads/efc62904d6eba92714248fce62fca073/single_dataset.ipynb
+++ b/_downloads/efc62904d6eba92714248fce62fca073/single_dataset.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/f23fbcea3c616b715df90ee0cbac9bdf/plot_geodesic_path.ipynb b/_downloads/f23fbcea3c616b715df90ee0cbac9bdf/plot_geodesic_path.ipynb
index c929d6152..e2991776f 100644
--- a/_downloads/f23fbcea3c616b715df90ee0cbac9bdf/plot_geodesic_path.ipynb
+++ b/_downloads/f23fbcea3c616b715df90ee0cbac9bdf/plot_geodesic_path.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -26,7 +15,7 @@
},
"outputs": [],
"source": [
- "import cortex\nimport cortex.polyutils\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nsubject = \"S1\"\n\n# First we need to import the surfaces for this subject\nsurfs = [cortex.polyutils.Surface(*d)\n for d in cortex.db.get_surf(subject, \"fiducial\")]\nnuml = surfs[0].pts.shape[0]\n\n# Now we need to pick the start and end points of the line we will draw\npt_a = 100\npt_b = 50000\n\n# Then we find the geodesic path between these points\npath = surfs[0].geodesic_path(pt_a, pt_b)\n\n# In order to plot this on the cortical surface, we need an array that is the\n# same size as the number of vertices in the left hemisphere\npath_data = np.zeros(numl)\nfor v in path:\n path_data[v] = 1\n\n# And now plot these distances onto the cortical surface\npath_verts = cortex.Vertex(path_data, subject, cmap=\"Blues_r\")\ncortex.quickshow(path_verts, with_colorbar=False)\nplt.show()"
+ "import cortex\nimport cortex.polyutils\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nsubject = \"S1\"\n\n# First we need to import the surfaces for this subject\nsurfs = [cortex.polyutils.Surface(*d)\n for d in cortex.db.get_surf(subject, \"fiducial\")]\nnuml = surfs[0].pts.shape[0]\nnumr = surfs[1].pts.shape[0]\nnum_vertices = numl + numr\n\n# Now we need to pick the start and end points of the line we will draw\npt_a = 100\npt_b = 50000\n\n# Then we find the geodesic path between these points\npath = surfs[0].geodesic_path(pt_a, pt_b)\n\n# In order to plot this on the cortical surface, we need an array that is the\n# same size as the number of vertices\npath_data = np.zeros(num_vertices) * np.nan\nfor v in path:\n path_data[v] = 1\n\n# And now plot these distances onto the cortical surface\npath_verts = cortex.Vertex(path_data, subject, cmap=\"Reds\", vmin=0, vmax=1)\ncortex.quickshow(path_verts, with_colorbar=False, with_curvature=True)\nplt.show()"
]
}
],
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_downloads/fc8b05be95bda3f3431b5b5752f2c0fd/plot_geodesic_distance.ipynb b/_downloads/fc8b05be95bda3f3431b5b5752f2c0fd/plot_geodesic_distance.ipynb
index 5ff5df1de..44843378e 100644
--- a/_downloads/fc8b05be95bda3f3431b5b5752f2c0fd/plot_geodesic_distance.ipynb
+++ b/_downloads/fc8b05be95bda3f3431b5b5752f2c0fd/plot_geodesic_distance.ipynb
@@ -1,16 +1,5 @@
{
"cells": [
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false
- },
- "outputs": [],
- "source": [
- "%matplotlib inline"
- ]
- },
{
"cell_type": "markdown",
"metadata": {},
@@ -46,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "version": "3.9.19"
}
},
"nbformat": 4,
diff --git a/_images/sphx_glr_plot_advanced_compositing_001.png b/_images/sphx_glr_plot_advanced_compositing_001.png
index 535254666..12d7ac026 100644
Binary files a/_images/sphx_glr_plot_advanced_compositing_001.png and b/_images/sphx_glr_plot_advanced_compositing_001.png differ
diff --git a/_images/sphx_glr_plot_advanced_compositing_thumb.png b/_images/sphx_glr_plot_advanced_compositing_thumb.png
index 4c5c957df..8ec11c64e 100644
Binary files a/_images/sphx_glr_plot_advanced_compositing_thumb.png and b/_images/sphx_glr_plot_advanced_compositing_thumb.png differ
diff --git a/_images/sphx_glr_plot_connected_vertices_001.png b/_images/sphx_glr_plot_connected_vertices_001.png
index 6c8331860..51cd68b53 100644
Binary files a/_images/sphx_glr_plot_connected_vertices_001.png and b/_images/sphx_glr_plot_connected_vertices_001.png differ
diff --git a/_images/sphx_glr_plot_connected_vertices_thumb.png b/_images/sphx_glr_plot_connected_vertices_thumb.png
index c7c69171c..ab20ba10f 100644
Binary files a/_images/sphx_glr_plot_connected_vertices_thumb.png and b/_images/sphx_glr_plot_connected_vertices_thumb.png differ
diff --git a/_images/sphx_glr_plot_cutouts_001.png b/_images/sphx_glr_plot_cutouts_001.png
index 864afa261..40a9b8e06 100644
Binary files a/_images/sphx_glr_plot_cutouts_001.png and b/_images/sphx_glr_plot_cutouts_001.png differ
diff --git a/_images/sphx_glr_plot_cutouts_thumb.png b/_images/sphx_glr_plot_cutouts_thumb.png
index fd1eb39b6..c46dfec24 100644
Binary files a/_images/sphx_glr_plot_cutouts_thumb.png and b/_images/sphx_glr_plot_cutouts_thumb.png differ
diff --git a/_images/sphx_glr_plot_dataset_arithmetic_001.png b/_images/sphx_glr_plot_dataset_arithmetic_001.png
index 2da2d7ce3..9b621f6a9 100644
Binary files a/_images/sphx_glr_plot_dataset_arithmetic_001.png and b/_images/sphx_glr_plot_dataset_arithmetic_001.png differ
diff --git a/_images/sphx_glr_plot_dataset_arithmetic_002.png b/_images/sphx_glr_plot_dataset_arithmetic_002.png
index 27e9a971e..03c884d31 100644
Binary files a/_images/sphx_glr_plot_dataset_arithmetic_002.png and b/_images/sphx_glr_plot_dataset_arithmetic_002.png differ
diff --git a/_images/sphx_glr_plot_dataset_arithmetic_003.png b/_images/sphx_glr_plot_dataset_arithmetic_003.png
index 36135c076..d25867c37 100644
Binary files a/_images/sphx_glr_plot_dataset_arithmetic_003.png and b/_images/sphx_glr_plot_dataset_arithmetic_003.png differ
diff --git a/_images/sphx_glr_plot_dataset_arithmetic_thumb.png b/_images/sphx_glr_plot_dataset_arithmetic_thumb.png
index 5e9b2c845..f6b7e2d40 100644
Binary files a/_images/sphx_glr_plot_dataset_arithmetic_thumb.png and b/_images/sphx_glr_plot_dataset_arithmetic_thumb.png differ
diff --git a/_images/sphx_glr_plot_dropout_001.png b/_images/sphx_glr_plot_dropout_001.png
index 7d6e8e706..26ea0bd52 100644
Binary files a/_images/sphx_glr_plot_dropout_001.png and b/_images/sphx_glr_plot_dropout_001.png differ
diff --git a/_images/sphx_glr_plot_dropout_thumb.png b/_images/sphx_glr_plot_dropout_thumb.png
index f88e8a923..e0fdf75b5 100644
Binary files a/_images/sphx_glr_plot_dropout_thumb.png and b/_images/sphx_glr_plot_dropout_thumb.png differ
diff --git a/_images/sphx_glr_plot_flatmap_distortion_001.png b/_images/sphx_glr_plot_flatmap_distortion_001.png
index 1425542e0..2e52f1f8e 100644
Binary files a/_images/sphx_glr_plot_flatmap_distortion_001.png and b/_images/sphx_glr_plot_flatmap_distortion_001.png differ
diff --git a/_images/sphx_glr_plot_flatmap_distortion_002.png b/_images/sphx_glr_plot_flatmap_distortion_002.png
index fc3304945..7479c5cd4 100644
Binary files a/_images/sphx_glr_plot_flatmap_distortion_002.png and b/_images/sphx_glr_plot_flatmap_distortion_002.png differ
diff --git a/_images/sphx_glr_plot_geodesic_distance_001.png b/_images/sphx_glr_plot_geodesic_distance_001.png
index b2283dda2..d527fd4ef 100644
Binary files a/_images/sphx_glr_plot_geodesic_distance_001.png and b/_images/sphx_glr_plot_geodesic_distance_001.png differ
diff --git a/_images/sphx_glr_plot_geodesic_distance_002.png b/_images/sphx_glr_plot_geodesic_distance_002.png
index 58e6d3488..786aa245c 100644
Binary files a/_images/sphx_glr_plot_geodesic_distance_002.png and b/_images/sphx_glr_plot_geodesic_distance_002.png differ
diff --git a/_images/sphx_glr_plot_geodesic_distance_thumb.png b/_images/sphx_glr_plot_geodesic_distance_thumb.png
index 285bb9615..1cfbb9f08 100644
Binary files a/_images/sphx_glr_plot_geodesic_distance_thumb.png and b/_images/sphx_glr_plot_geodesic_distance_thumb.png differ
diff --git a/_images/sphx_glr_plot_geodesic_path_001.png b/_images/sphx_glr_plot_geodesic_path_001.png
index c963e8c73..89c8e8a77 100644
Binary files a/_images/sphx_glr_plot_geodesic_path_001.png and b/_images/sphx_glr_plot_geodesic_path_001.png differ
diff --git a/_images/sphx_glr_plot_geodesic_path_thumb.png b/_images/sphx_glr_plot_geodesic_path_thumb.png
index 8564e05ce..42767fc08 100644
Binary files a/_images/sphx_glr_plot_geodesic_path_thumb.png and b/_images/sphx_glr_plot_geodesic_path_thumb.png differ
diff --git a/_images/sphx_glr_plot_get_roi_vertices_001.png b/_images/sphx_glr_plot_get_roi_vertices_001.png
index 1d2ebe2f1..7cc44d362 100644
Binary files a/_images/sphx_glr_plot_get_roi_vertices_001.png and b/_images/sphx_glr_plot_get_roi_vertices_001.png differ
diff --git a/_images/sphx_glr_plot_get_roi_vertices_thumb.png b/_images/sphx_glr_plot_get_roi_vertices_thumb.png
index b3181de3e..ac1b8f49c 100644
Binary files a/_images/sphx_glr_plot_get_roi_vertices_thumb.png and b/_images/sphx_glr_plot_get_roi_vertices_thumb.png differ
diff --git a/_images/sphx_glr_plot_interpolate_data_001.png b/_images/sphx_glr_plot_interpolate_data_001.png
index bff8ac833..18e7de869 100644
Binary files a/_images/sphx_glr_plot_interpolate_data_001.png and b/_images/sphx_glr_plot_interpolate_data_001.png differ
diff --git a/_images/sphx_glr_plot_make_figure_001.png b/_images/sphx_glr_plot_make_figure_001.png
index 401f1a5a8..b2efaecfd 100644
Binary files a/_images/sphx_glr_plot_make_figure_001.png and b/_images/sphx_glr_plot_make_figure_001.png differ
diff --git a/_images/sphx_glr_plot_make_figure_002.png b/_images/sphx_glr_plot_make_figure_002.png
index 401f1a5a8..b2efaecfd 100644
Binary files a/_images/sphx_glr_plot_make_figure_002.png and b/_images/sphx_glr_plot_make_figure_002.png differ
diff --git a/_images/sphx_glr_plot_make_figure_003.png b/_images/sphx_glr_plot_make_figure_003.png
index b36766c37..7917d77d6 100644
Binary files a/_images/sphx_glr_plot_make_figure_003.png and b/_images/sphx_glr_plot_make_figure_003.png differ
diff --git a/_images/sphx_glr_plot_make_figure_004.png b/_images/sphx_glr_plot_make_figure_004.png
index 27fe9f986..2eaee3d1c 100644
Binary files a/_images/sphx_glr_plot_make_figure_004.png and b/_images/sphx_glr_plot_make_figure_004.png differ
diff --git a/_images/sphx_glr_plot_make_figure_005.png b/_images/sphx_glr_plot_make_figure_005.png
index 04129ab37..dffd8854b 100644
Binary files a/_images/sphx_glr_plot_make_figure_005.png and b/_images/sphx_glr_plot_make_figure_005.png differ
diff --git a/_images/sphx_glr_plot_make_figure_006.png b/_images/sphx_glr_plot_make_figure_006.png
index 7ac2b4e56..bd16805d1 100644
Binary files a/_images/sphx_glr_plot_make_figure_006.png and b/_images/sphx_glr_plot_make_figure_006.png differ
diff --git a/_images/sphx_glr_plot_make_figure_thumb.png b/_images/sphx_glr_plot_make_figure_thumb.png
index 69b0da65d..e1f079c47 100644
Binary files a/_images/sphx_glr_plot_make_figure_thumb.png and b/_images/sphx_glr_plot_make_figure_thumb.png differ
diff --git a/_images/sphx_glr_plot_make_gif_001.png b/_images/sphx_glr_plot_make_gif_001.png
index d34fa5f67..c857f2db8 100644
Binary files a/_images/sphx_glr_plot_make_gif_001.png and b/_images/sphx_glr_plot_make_gif_001.png differ
diff --git a/_images/sphx_glr_plot_make_gif_002.png b/_images/sphx_glr_plot_make_gif_002.png
index 72411f3a7..638330182 100644
Binary files a/_images/sphx_glr_plot_make_gif_002.png and b/_images/sphx_glr_plot_make_gif_002.png differ
diff --git a/_images/sphx_glr_plot_make_gif_thumb.png b/_images/sphx_glr_plot_make_gif_thumb.png
index cc159adbe..448bf90f8 100644
Binary files a/_images/sphx_glr_plot_make_gif_thumb.png and b/_images/sphx_glr_plot_make_gif_thumb.png differ
diff --git a/_images/sphx_glr_plot_make_png_001.png b/_images/sphx_glr_plot_make_png_001.png
index 19e192568..bcad42457 100644
Binary files a/_images/sphx_glr_plot_make_png_001.png and b/_images/sphx_glr_plot_make_png_001.png differ
diff --git a/_images/sphx_glr_plot_make_png_thumb.png b/_images/sphx_glr_plot_make_png_thumb.png
index f34e10a2c..9e3647222 100644
Binary files a/_images/sphx_glr_plot_make_png_thumb.png and b/_images/sphx_glr_plot_make_png_thumb.png differ
diff --git a/_images/sphx_glr_plot_make_svg_001.png b/_images/sphx_glr_plot_make_svg_001.png
index 19e192568..bcad42457 100644
Binary files a/_images/sphx_glr_plot_make_svg_001.png and b/_images/sphx_glr_plot_make_svg_001.png differ
diff --git a/_images/sphx_glr_plot_make_svg_thumb.png b/_images/sphx_glr_plot_make_svg_thumb.png
index f34e10a2c..9e3647222 100644
Binary files a/_images/sphx_glr_plot_make_svg_thumb.png and b/_images/sphx_glr_plot_make_svg_thumb.png differ
diff --git a/_images/sphx_glr_plot_mosaic_001.png b/_images/sphx_glr_plot_mosaic_001.png
index 0fd3b7f14..a7d29bb19 100644
Binary files a/_images/sphx_glr_plot_mosaic_001.png and b/_images/sphx_glr_plot_mosaic_001.png differ
diff --git a/_images/sphx_glr_plot_mosaic_002.png b/_images/sphx_glr_plot_mosaic_002.png
index 0851388bb..c07e01048 100644
Binary files a/_images/sphx_glr_plot_mosaic_002.png and b/_images/sphx_glr_plot_mosaic_002.png differ
diff --git a/_images/sphx_glr_plot_mosaic_003.png b/_images/sphx_glr_plot_mosaic_003.png
index 47cae3486..d2c03272e 100644
Binary files a/_images/sphx_glr_plot_mosaic_003.png and b/_images/sphx_glr_plot_mosaic_003.png differ
diff --git a/_images/sphx_glr_plot_retinotopy_flatmap_001.png b/_images/sphx_glr_plot_retinotopy_flatmap_001.png
index b0daaae2d..c74d38a1f 100644
Binary files a/_images/sphx_glr_plot_retinotopy_flatmap_001.png and b/_images/sphx_glr_plot_retinotopy_flatmap_001.png differ
diff --git a/_images/sphx_glr_plot_retinotopy_flatmap_002.png b/_images/sphx_glr_plot_retinotopy_flatmap_002.png
index 495a9666b..06af899cd 100644
Binary files a/_images/sphx_glr_plot_retinotopy_flatmap_002.png and b/_images/sphx_glr_plot_retinotopy_flatmap_002.png differ
diff --git a/_images/sphx_glr_plot_retinotopy_flatmap_thumb.png b/_images/sphx_glr_plot_retinotopy_flatmap_thumb.png
index 3b4a2cc82..e3c0fa18f 100644
Binary files a/_images/sphx_glr_plot_retinotopy_flatmap_thumb.png and b/_images/sphx_glr_plot_retinotopy_flatmap_thumb.png differ
diff --git a/_images/sphx_glr_plot_roi_voxel_index_volume_001.png b/_images/sphx_glr_plot_roi_voxel_index_volume_001.png
index 83c4ca699..761d12b37 100644
Binary files a/_images/sphx_glr_plot_roi_voxel_index_volume_001.png and b/_images/sphx_glr_plot_roi_voxel_index_volume_001.png differ
diff --git a/_images/sphx_glr_plot_roi_voxel_index_volume_thumb.png b/_images/sphx_glr_plot_roi_voxel_index_volume_thumb.png
index 84a8eb32f..e500e0072 100644
Binary files a/_images/sphx_glr_plot_roi_voxel_index_volume_thumb.png and b/_images/sphx_glr_plot_roi_voxel_index_volume_thumb.png differ
diff --git a/_images/sphx_glr_plot_roi_voxel_mask_001.png b/_images/sphx_glr_plot_roi_voxel_mask_001.png
index 183bc1432..f99abfa87 100644
Binary files a/_images/sphx_glr_plot_roi_voxel_mask_001.png and b/_images/sphx_glr_plot_roi_voxel_mask_001.png differ
diff --git a/_images/sphx_glr_plot_roi_voxel_mask_thumb.png b/_images/sphx_glr_plot_roi_voxel_mask_thumb.png
index ada211879..9d99e7ded 100644
Binary files a/_images/sphx_glr_plot_roi_voxel_mask_thumb.png and b/_images/sphx_glr_plot_roi_voxel_mask_thumb.png differ
diff --git a/_images/sphx_glr_plot_rois_001.png b/_images/sphx_glr_plot_rois_001.png
index 19e192568..bcad42457 100644
Binary files a/_images/sphx_glr_plot_rois_001.png and b/_images/sphx_glr_plot_rois_001.png differ
diff --git a/_images/sphx_glr_plot_rois_002.png b/_images/sphx_glr_plot_rois_002.png
index f266c64c4..193289382 100644
Binary files a/_images/sphx_glr_plot_rois_002.png and b/_images/sphx_glr_plot_rois_002.png differ
diff --git a/_images/sphx_glr_plot_rois_003.png b/_images/sphx_glr_plot_rois_003.png
index cfd2f3a0f..9f95285f7 100644
Binary files a/_images/sphx_glr_plot_rois_003.png and b/_images/sphx_glr_plot_rois_003.png differ
diff --git a/_images/sphx_glr_plot_rois_thumb.png b/_images/sphx_glr_plot_rois_thumb.png
index f34e10a2c..9e3647222 100644
Binary files a/_images/sphx_glr_plot_rois_thumb.png and b/_images/sphx_glr_plot_rois_thumb.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_001.png b/_images/sphx_glr_plot_subsurfaces_001.png
index 223e71f63..9f770d6ef 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_001.png and b/_images/sphx_glr_plot_subsurfaces_001.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_002.png b/_images/sphx_glr_plot_subsurfaces_002.png
index 9dfbd7cab..bd0accd4c 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_002.png and b/_images/sphx_glr_plot_subsurfaces_002.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_003.png b/_images/sphx_glr_plot_subsurfaces_003.png
index 09fe20fdd..a84d1936b 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_003.png and b/_images/sphx_glr_plot_subsurfaces_003.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_004.png b/_images/sphx_glr_plot_subsurfaces_004.png
index a3c7d637b..22908c02c 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_004.png and b/_images/sphx_glr_plot_subsurfaces_004.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_005.png b/_images/sphx_glr_plot_subsurfaces_005.png
index 824753cb6..8944afd75 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_005.png and b/_images/sphx_glr_plot_subsurfaces_005.png differ
diff --git a/_images/sphx_glr_plot_subsurfaces_thumb.png b/_images/sphx_glr_plot_subsurfaces_thumb.png
index 86ba92c8c..9a45ea642 100644
Binary files a/_images/sphx_glr_plot_subsurfaces_thumb.png and b/_images/sphx_glr_plot_subsurfaces_thumb.png differ
diff --git a/_images/sphx_glr_plot_sulci_001.png b/_images/sphx_glr_plot_sulci_001.png
index 255d11c87..aee55d08d 100644
Binary files a/_images/sphx_glr_plot_sulci_001.png and b/_images/sphx_glr_plot_sulci_001.png differ
diff --git a/_images/sphx_glr_plot_sulci_thumb.png b/_images/sphx_glr_plot_sulci_thumb.png
index e6ca4ed8a..4873564ab 100644
Binary files a/_images/sphx_glr_plot_sulci_thumb.png and b/_images/sphx_glr_plot_sulci_thumb.png differ
diff --git a/_images/sphx_glr_plot_thickness_nanmean_001.png b/_images/sphx_glr_plot_thickness_nanmean_001.png
index c33b24408..3219b6569 100644
Binary files a/_images/sphx_glr_plot_thickness_nanmean_001.png and b/_images/sphx_glr_plot_thickness_nanmean_001.png differ
diff --git a/_images/sphx_glr_plot_thickness_nanmean_002.png b/_images/sphx_glr_plot_thickness_nanmean_002.png
index 2e7029878..3ad94817d 100644
Binary files a/_images/sphx_glr_plot_thickness_nanmean_002.png and b/_images/sphx_glr_plot_thickness_nanmean_002.png differ
diff --git a/_images/sphx_glr_plot_thickness_nanmean_thumb.png b/_images/sphx_glr_plot_thickness_nanmean_thumb.png
index 68db30609..131def965 100644
Binary files a/_images/sphx_glr_plot_thickness_nanmean_thumb.png and b/_images/sphx_glr_plot_thickness_nanmean_thumb.png differ
diff --git a/_images/sphx_glr_plot_tissots_indicatrix_001.png b/_images/sphx_glr_plot_tissots_indicatrix_001.png
index b29c17369..823ba152d 100644
Binary files a/_images/sphx_glr_plot_tissots_indicatrix_001.png and b/_images/sphx_glr_plot_tissots_indicatrix_001.png differ
diff --git a/_images/sphx_glr_plot_vertex2D_001.png b/_images/sphx_glr_plot_vertex2D_001.png
index 9553170f3..1cc3ba436 100644
Binary files a/_images/sphx_glr_plot_vertex2D_001.png and b/_images/sphx_glr_plot_vertex2D_001.png differ
diff --git a/_images/sphx_glr_plot_vertex2D_thumb.png b/_images/sphx_glr_plot_vertex2D_thumb.png
index 3530c2b98..eb3efa5e4 100644
Binary files a/_images/sphx_glr_plot_vertex2D_thumb.png and b/_images/sphx_glr_plot_vertex2D_thumb.png differ
diff --git a/_images/sphx_glr_plot_vertexRGB_001.png b/_images/sphx_glr_plot_vertexRGB_001.png
index 9435e4e59..aa7a143dd 100644
Binary files a/_images/sphx_glr_plot_vertexRGB_001.png and b/_images/sphx_glr_plot_vertexRGB_001.png differ
diff --git a/_images/sphx_glr_plot_vertexRGB_thumb.png b/_images/sphx_glr_plot_vertexRGB_thumb.png
index def70e131..9448e7bdd 100644
Binary files a/_images/sphx_glr_plot_vertexRGB_thumb.png and b/_images/sphx_glr_plot_vertexRGB_thumb.png differ
diff --git a/_images/sphx_glr_plot_vertex_001.png b/_images/sphx_glr_plot_vertex_001.png
index 82a6587c8..adf3efbc9 100644
Binary files a/_images/sphx_glr_plot_vertex_001.png and b/_images/sphx_glr_plot_vertex_001.png differ
diff --git a/_images/sphx_glr_plot_vertex_002.png b/_images/sphx_glr_plot_vertex_002.png
index 7844c8651..6afebcca5 100644
Binary files a/_images/sphx_glr_plot_vertex_002.png and b/_images/sphx_glr_plot_vertex_002.png differ
diff --git a/_images/sphx_glr_plot_vertex_thumb.png b/_images/sphx_glr_plot_vertex_thumb.png
index b9820f54a..3d0e7482a 100644
Binary files a/_images/sphx_glr_plot_vertex_thumb.png and b/_images/sphx_glr_plot_vertex_thumb.png differ
diff --git a/_images/sphx_glr_plot_volume2D_001.png b/_images/sphx_glr_plot_volume2D_001.png
index ad00989d8..52fa029c4 100644
Binary files a/_images/sphx_glr_plot_volume2D_001.png and b/_images/sphx_glr_plot_volume2D_001.png differ
diff --git a/_images/sphx_glr_plot_volume2D_002.png b/_images/sphx_glr_plot_volume2D_002.png
index 2f49fb659..064c647e9 100644
Binary files a/_images/sphx_glr_plot_volume2D_002.png and b/_images/sphx_glr_plot_volume2D_002.png differ
diff --git a/_images/sphx_glr_plot_volume2D_003.png b/_images/sphx_glr_plot_volume2D_003.png
index c33fbb3ca..c37e393fe 100644
Binary files a/_images/sphx_glr_plot_volume2D_003.png and b/_images/sphx_glr_plot_volume2D_003.png differ
diff --git a/_images/sphx_glr_plot_volume2D_thumb.png b/_images/sphx_glr_plot_volume2D_thumb.png
index 32e5c5c06..e59f71142 100644
Binary files a/_images/sphx_glr_plot_volume2D_thumb.png and b/_images/sphx_glr_plot_volume2D_thumb.png differ
diff --git a/_images/sphx_glr_plot_volumeRGB_001.png b/_images/sphx_glr_plot_volumeRGB_001.png
index 2ebca3b3e..fb53a9517 100644
Binary files a/_images/sphx_glr_plot_volumeRGB_001.png and b/_images/sphx_glr_plot_volumeRGB_001.png differ
diff --git a/_images/sphx_glr_plot_volumeRGB_002.png b/_images/sphx_glr_plot_volumeRGB_002.png
index 3fa707ee6..c0bf0311d 100644
Binary files a/_images/sphx_glr_plot_volumeRGB_002.png and b/_images/sphx_glr_plot_volumeRGB_002.png differ
diff --git a/_images/sphx_glr_plot_volumeRGB_003.png b/_images/sphx_glr_plot_volumeRGB_003.png
index 5dea7da79..37f02fce1 100644
Binary files a/_images/sphx_glr_plot_volumeRGB_003.png and b/_images/sphx_glr_plot_volumeRGB_003.png differ
diff --git a/_images/sphx_glr_plot_volumeRGB_thumb.png b/_images/sphx_glr_plot_volumeRGB_thumb.png
index cdf5b0bde..e35b6cba4 100644
Binary files a/_images/sphx_glr_plot_volumeRGB_thumb.png and b/_images/sphx_glr_plot_volumeRGB_thumb.png differ
diff --git a/_images/sphx_glr_plot_volume_001.png b/_images/sphx_glr_plot_volume_001.png
index 19e192568..bcad42457 100644
Binary files a/_images/sphx_glr_plot_volume_001.png and b/_images/sphx_glr_plot_volume_001.png differ
diff --git a/_images/sphx_glr_plot_volume_002.png b/_images/sphx_glr_plot_volume_002.png
index 4258e736f..f1cefa9e9 100644
Binary files a/_images/sphx_glr_plot_volume_002.png and b/_images/sphx_glr_plot_volume_002.png differ
diff --git a/_images/sphx_glr_plot_volume_003.png b/_images/sphx_glr_plot_volume_003.png
index 7a5be9d73..ef5fe2091 100644
Binary files a/_images/sphx_glr_plot_volume_003.png and b/_images/sphx_glr_plot_volume_003.png differ
diff --git a/_images/sphx_glr_plot_volume_thumb.png b/_images/sphx_glr_plot_volume_thumb.png
index f34e10a2c..9e3647222 100644
Binary files a/_images/sphx_glr_plot_volume_thumb.png and b/_images/sphx_glr_plot_volume_thumb.png differ
diff --git a/_images/sphx_glr_plot_volume_to_vertex_001.png b/_images/sphx_glr_plot_volume_to_vertex_001.png
index 19e192568..bcad42457 100644
Binary files a/_images/sphx_glr_plot_volume_to_vertex_001.png and b/_images/sphx_glr_plot_volume_to_vertex_001.png differ
diff --git a/_images/sphx_glr_plot_volume_to_vertex_002.png b/_images/sphx_glr_plot_volume_to_vertex_002.png
index e74641183..7ef04d094 100644
Binary files a/_images/sphx_glr_plot_volume_to_vertex_002.png and b/_images/sphx_glr_plot_volume_to_vertex_002.png differ
diff --git a/_images/sphx_glr_plot_volume_to_vertex_thumb.png b/_images/sphx_glr_plot_volume_to_vertex_thumb.png
index f34e10a2c..9e3647222 100644
Binary files a/_images/sphx_glr_plot_volume_to_vertex_thumb.png and b/_images/sphx_glr_plot_volume_to_vertex_thumb.png differ
diff --git a/_images/sphx_glr_plot_voxel_distance_from_surface_001.png b/_images/sphx_glr_plot_voxel_distance_from_surface_001.png
index ef06e6bd3..1cceb73d1 100644
Binary files a/_images/sphx_glr_plot_voxel_distance_from_surface_001.png and b/_images/sphx_glr_plot_voxel_distance_from_surface_001.png differ
diff --git a/_images/sphx_glr_plot_zoom_to_roi_001.png b/_images/sphx_glr_plot_zoom_to_roi_001.png
index d9d680ef2..0f38a1449 100644
Binary files a/_images/sphx_glr_plot_zoom_to_roi_001.png and b/_images/sphx_glr_plot_zoom_to_roi_001.png differ
diff --git a/_images/sphx_glr_plot_zoom_to_roi_002.png b/_images/sphx_glr_plot_zoom_to_roi_002.png
index 6315e071b..1b9749d6a 100644
Binary files a/_images/sphx_glr_plot_zoom_to_roi_002.png and b/_images/sphx_glr_plot_zoom_to_roi_002.png differ
diff --git a/_images/sphx_glr_plot_zoom_to_roi_thumb.png b/_images/sphx_glr_plot_zoom_to_roi_thumb.png
index 463b37f81..13e54ed86 100644
Binary files a/_images/sphx_glr_plot_zoom_to_roi_thumb.png and b/_images/sphx_glr_plot_zoom_to_roi_thumb.png differ
diff --git a/_images/sphx_glr_upsample_to_fsaverage_thumb.png b/_images/sphx_glr_upsample_to_fsaverage_thumb.png
new file mode 100644
index 000000000..8a5fed589
Binary files /dev/null and b/_images/sphx_glr_upsample_to_fsaverage_thumb.png differ
diff --git a/_modules/cortex/align.html b/_modules/cortex/align.html
index feef1b67d..cb2c49c30 100644
--- a/_modules/cortex/align.html
+++ b/_modules/cortex/align.html
@@ -1,28 +1,27 @@
-
-
+
"""Contains functions for making alignments between functional data and the surface, or, finding where the brain is."""importos
-importnumpyasnp
-frombuiltinsimportinput
+importshutil
+importsubprocessassp
+importtempfileimportwarnings
+frombuiltinsimportinput
+
+importnumpyasnp
+
+from.databaseimportdb
+from.optionsimportconfig
+from.xfmimportTransform
+
defmayavi_manual(subject,xfmname,reference=None,**kwargs):
- """Open GUI for manually aligning a functional volume to the cortical surface for `subject`. This
+"""Open GUI for manually aligning a functional volume to the cortical surface for `subject`. This creates a new transform called `xfm`. The name of a nibabel-readable file (e.g. nii) should be supplied as `reference`. This image will be copied into the database.
@@ -129,16 +137,18 @@
Source code for cortex.align
def fs_manual(subject, xfmname, **kwargs):
-    """Legacy name for cortex.align.manual. Please use that function, and see the help there."""
+    """Legacy name for cortex.align.manual. Please use that function, and see the help there."""
    warnings.warn(("Deprecated name - function has been renamed cortex.align.manual"
                   "This function name will be removed in a future release."),
                  DeprecationWarning)
    return manual(subject, xfmname, **kwargs)
-
+[docs]
+def manual(subject, xfmname, output_name="register.lta", wm_color="yellow",
+           pial_color="blue", wm_surface='white', noclean=False, reference=None,
+           inspect_only=False):
-    """Open Freesurfer FreeView GUI for manually aligning/adjusting a functional
+    """Open Freesurfer FreeView GUI for manually aligning/adjusting a functional
@@ -192,11 +202,12 @@
[docs]
-def automatic(subject, xfmname, reference, noclean=False, bbrtype="signed",
-              pre_flirt_args='', use_fs_bbr=True, epi_mask=False, intermediate=None):
-    """Create an automatic alignment using the FLIRT boundary-based alignment (BBR) from FSL.
-    If `noclean`, intermediate files will not be removed from /tmp. The `reference` image and resulting
-    transform called `xfmname` will be automatically stored in the database.
+def automatic_fsl(
+    subject, xfmname, reference, noclean=False, bbrtype="signed", pre_flirt_args=""
+):
+"""Perform automatic alignment using the FLIRT boundary-based alignment (BBR) from
+ FSL. The `reference` image and resulting transform called `xfmname` will be
+ automatically stored in the database.
- It's good practice to open up this transform afterward in the manual aligner and check how it worked.
- Do that using the following (with the same `subject` and `xfmname` used here, no need for `reference`):
- > align.manual(subject, xfmname)
+ If `noclean` is set to True, intermediate files will not be removed from /tmp.
- If automatic alignment gives you a very bad answer, you can try giving the pre-BBR FLIRT
- some hints by passing '-usesqform' in as `pre_flirt_args`.
+ It's good practice to open up this transform afterward in the manual aligner and
+ check how it worked. Do that using the following code (passing the same `subject`
+ and `xfmname` used here, no need for `reference`):
+
+ > cortex.align.manual(subject, xfmname)
+
+ If automatic alignment with FSL fails, you can try giving the pre-BBR FLIRT
+ some hints by passing '-usesqform' in as `pre_flirt_args`. Alternatively, you can
+ use `cortex.align.automatic` to use Freesurfer's bbregister, which tends to work
+    better than FSL.

    Parameters
    ----------
    subject : str
        Subject identifier.
    xfmname : str
-        String identifying the transform to be created.
+        Name of the transform to be created and stored in the database.
    reference : str
-        Path to a nibabel-readable image that will be used as the reference for this transform.
-        Usually, this is a single (3D) functional data volume.
+        Path to a nibabel-readable image that will be used as the reference for
+        this transform. Usually, this is a single (3D) functional data volume.
    noclean : bool, optional
-        If True intermediate files will not be removed from /tmp (this is useful for debugging things),
-        and the returned value will be the name of the temp directory. Default False.
+        If True, intermediate files will not be removed from /tmp (this is useful
+        for debugging things), and the returned value will be the name of the
+        temp directory. Default False.
    bbrtype : str, optional
        The 'bbrtype' argument that is passed to FLIRT.
    pre_flirt_args : str, optional
        Additional arguments that are passed to the FLIRT pre-alignment step (not BBR).
-    use_fs_bbr : bool, optional
-        If True will use freesurfer bbregister instead of FSL BBR. (default, True)
-    epi_mask : bool, optional
-        If True, and use_fs_bbr is True, then the flag --epi-mask is passed to bbregister
-        to mask out areas with spatial distortions. This setting is to be used whenever
-        the reference was not distortion corrected.
-    intermediate : str
-        Path to a nibabel-readable image that will be used as intermediate for the alignment.
-        Usually, this is a single (3D) functional data volume.

    Returns
    -------
-    Nothing unless `noclean` is True.
+    None or path to temp directory if `noclean` is True.
    """
- warnings.warn("Defaults changed in pycortex 1.3. Now automatic alignment "
- "uses Freesurfer's bbregister and mri_coreg for "
- "initialization.")
-    import shlex
-    import shutil
-    import tempfile
-    import subprocess as sp
-
-    from .database import db
-    from .xfm import Transform
-    from .options import config
-
-    fsl_prefix = config.get("basic", "fsl_prefix")
-    schfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], "bbr.sch")
-
    retval = None
    try:
        cache = tempfile.mkdtemp()
        absreference = os.path.abspath(reference)
+        fsl_prefix = config.get("basic", "fsl_prefix")
+        schfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], "bbr.sch")
+ raw=db.get_anat(subject,type="raw").get_filename()
+ bet=db.get_anat(subject,type="brainmask").get_filename()
+ wmseg=db.get_anat(subject,type="whitematter").get_filename()
+ # Compute anatomical-to-epi transform
+ print("FLIRT pre-alignment")
+ cmd=(
+ "{fslpre}flirt -in {epi} -ref {bet} -dof 6 {pre_flirt_args} "
+ "-omat {cache}/init.mat"
+ )
+ cmd=cmd.format(
+ fslpre=fsl_prefix,
+ cache=cache,
+ epi=absreference,
+ bet=bet,
+ pre_flirt_args=pre_flirt_args,
+ )
+        if sp.call(cmd, shell=True) != 0:
+            raise IOError("Error calling initial FLIRT")
+
+ print("Running BBR")
+ # Run epi-to-anat transform (this is more stable than anat-to-epi in FSL!)
+ cmd=(
+ "{fslpre}flirt -in {epi} -ref {raw} -dof 6 -cost bbr -wmseg {wmseg} "
+ "-init {cache}/init.mat -omat {cache}/out.mat -schedule {schfile} "
+ "-bbrtype {bbrtype}"
+ )
+ cmd=cmd.format(
+ fslpre=fsl_prefix,
+ cache=cache,
+ raw=bet,
+ wmseg=wmseg,
+ epi=absreference,
+ schfile=schfile,
+ bbrtype=bbrtype,
+ )
+        if sp.call(cmd, shell=True) != 0:
+            raise IOError("Error calling BBR flirt")
-        if use_fs_bbr:
-            print('Running freesurfer BBR')
-            cmd = 'bbregister --s {sub} --mov {absref} --init-coreg --reg {cache}/register.dat --t2'
-            if epi_mask:
-                cmd += ' --epi-mask'
-            if intermediate is not None:
-                cmd += f' --int {intermediate}'
-            cmd = cmd.format(sub=subject, absref=absreference, cache=cache)
-
-            if sp.call(cmd, shell=True) != 0:
-                raise IOError('Error calling freesurfer BBR!')
+        x = np.loadtxt(os.path.join(cache, "out.mat"))
+        # Pass transform as FROM epi TO anat; transform will be inverted
+        # back to anat-to-epi, standard direction for pycortex internal
+        # storage by from_fsl
+        xfm = Transform.from_fsl(x, absreference, raw)
+        xfm.save(subject, xfmname, "coord")
+        print("Success")
-            xfm = Transform.from_freesurfer(os.path.join(cache, "register.dat"), absreference, subject)
+    finally:
+        if not noclean:
+            shutil.rmtree(cache)
+        else:
- raw=db.get_anat(subject,type='raw').get_filename()
- bet=db.get_anat(subject,type='brainmask').get_filename()
- wmseg=db.get_anat(subject,type='whitematter').get_filename()
- #Compute anatomical-to-epi transform
- print('FLIRT pre-alignment')
- cmd='{fslpre}flirt -in {epi} -ref {bet} -dof 6 {pre_flirt_args} -omat {cache}/init.mat'.format(
- fslpre=fsl_prefix,cache=cache,epi=absreference,bet=bet,pre_flirt_args=pre_flirt_args)
-            if sp.call(cmd, shell=True) != 0:
-                raise IOError('Error calling initial FLIRT')
-
-
- print('Running BBR')
- # Run epi-to-anat transform (this is more stable than anat-to-epi in FSL!)
- cmd='{fslpre}flirt -in {epi} -ref {raw} -dof 6 -cost bbr -wmseg {wmseg} -init {cache}/init.mat -omat {cache}/out.mat -schedule {schfile} -bbrtype {bbrtype}'
- cmd=cmd.format(fslpre=fsl_prefix,cache=cache,raw=bet,wmseg=wmseg,epi=absreference,schfile=schfile,bbrtype=bbrtype)
-            if sp.call(cmd, shell=True) != 0:
-                raise IOError('Error calling BBR flirt')
-
- x=np.loadtxt(os.path.join(cache,"out.mat"))
- # Pass transform as FROM epi TO anat; transform will be inverted
- # back to anat-to-epi, standard direction for pycortex internal
- # storage by from_fsl
- xfm=Transform.from_fsl(x,absreference,raw)
+ retval=cache
+    return retval
- # Save as pycortex 'coord' transform
- xfm.save(subject,xfmname,'coord')
- print('Success')
+
+[docs]
+def automatic(
+ subject,
+ xfmname,
+ reference,
+ init="coreg",
+ epi_mask=False,
+ intermediate=None,
+ reference_contrast="t2",
+ noclean=False,
+):
+"""Perform automatic alignment using Freesurfer's boundary-based registration.
+ The `reference` image and resulting transform called `xfmname` will be automatically
+ stored in the database.
+
+ If `noclean` is set to True, intermediate files will not be removed from /tmp.
+
+ It's good practice to open up this transform afterward in the manual aligner and
+ check how it worked. Do that using the following code (passing the same `subject`
+ and `xfmname` used here, no need for `reference`):
+
+ > cortex.align.manual(subject, xfmname)
+
+ Parameters
+ ----------
+ subject : str
+ Subject identifier.
+ xfmname : str
+ Name of the transform to be created and stored in the database.
+ reference : str
+ Path to a nibabel-readable image that will be used as the reference for
+ this transform. Usually, this is a single (3D) functional data volume.
+ init : str, optional
+ One of "coreg", "fsl", "header", or a path to a transform from the reference
+ volume to the anatomical volume.
+ Specifies the initialization method to obtain a first alignment that will be
+ refined with bbregister. Default is "coreg", which uses Freesurfer's `mri_coreg`
+        and generally performs best. Other options include "fsl" to use FSL's FLIRT
+ or "header" to assume that the reference and the anatomical are already close
+ enough (this option can be used when the reference and the anatomical are
+ acquired in the same session). Finally, you can also specify a path to a
+ transform file (in DAT or LTA format) that will be used as initialization.
+ epi_mask : bool, optional
+ If True, then the flag --epi-mask is passed to bbregister to mask out areas
+        with spatial distortions. This setting is recommended whenever the reference
+        was not distortion corrected.
+ intermediate : str
+ Path to a nibabel-readable image that will be used as intermediate volume
+ for alignment. This is useful if the reference image has a small field-of-view
+ and a whole-brain image was acquired in the same session.
+ reference_contrast : str
+ Contrast of the reference image. This is used to determine the appropriate
+ contrast for the reference image in the bbregister command. Default is "t2"
+ (for BOLD): gray matter is brighter than white matter.
+ The alternative option is "t1": white matter is brighter than gray matter.
+ noclean : bool, optional
+ If True, intermediate files will not be removed from /tmp (this is useful
+ for debugging things), and the returned value will be the name of the
+ temp directory. Default False.
+
+ Returns
+ -------
+ None or path to temp directory if `noclean` is True.
+ """
+ warnings.warn(
+ "Defaults changed in pycortex 1.2.8. Now automatic alignment uses Freesurfer's "
+ "bbregister and mri_coreg for initialization. If you want to use FSL's BBR, "
+ "use the function `cortex.align.automatic_fsl` instead."
+ )
+
+ retval=None
+ try:
+ cache=tempfile.mkdtemp()
+ reference=os.path.abspath(reference)
+ print("Running freesurfer BBR")
+ # Start building the command
+ cmd=f"bbregister --s {subject} --mov {reference} --reg {cache}/register.dat "
+ cmd+=f"--{reference_contrast}"
+        if init in ["coreg", "fsl", "header"]:
+            cmd += f" --init-{init}"
+        else:
+            init = os.path.abspath(init)
+            cmd += f" --init-reg {init}"
+        if epi_mask:
+            cmd += " --epi-mask"
+        if intermediate is not None:
+            intermediate = os.path.abspath(intermediate)
+            cmd += f" --int {intermediate}"
+        cmd = cmd.format(sub=subject, absref=reference, cache=cache)
+
+        if sp.call(cmd.split()) != 0:
+            raise IOError("Error calling freesurfer BBR!")
+
+ xfm=Transform.from_freesurfer(
+ os.path.join(cache,"register.dat"),reference,subject
+ )
+ # Save as pycortex 'coord' transform
+ xfm.save(subject,xfmname,"coord")
+ # Load mincost to provide some information
+ # The four values stored in .mincost are
+ # 1. quality of the registration (0 to 1, with 0 is perfect)
+ # 2. mean of WM just below the surface
+ # 3. mean of GM just above the surface
+ # 4. percent contrast
+ # https://surfer.nmr.mgh.harvard.edu/fswiki/MultiModalTutorialV6.0/MultiModalRegistration
+ # Let's return only the quality of the registration
+ mincost=np.loadtxt(os.path.join(cache,"register.dat.mincost"))
+ print(
+ f"bbregister finished with a mincost of {mincost[0]:.2f}\n"
+ "Values between 0 and 0.5 indicate a good registration.\n"
+ "But you should manually inspect the alignment anyway."
+        )
    finally:
        if not noclean:
            shutil.rmtree(cache)
        else:
            retval = cache
-
    return retval
-
[docs]
-def autotweak(subject, xfmname):
-    """Tweak an alignment using the FLIRT boundary-based alignment (BBR) from FSL.
+
+
+
+[docs]
+def autotweak(subject, xfmname):
+    """Tweak an alignment using the FLIRT boundary-based alignment (BBR) from FSL.
@@ -400,12 +524,12 @@
print("Attempting to segment the brain with freesurfer...")bet2=db.get_anat(subject,type='raw_wm').get_filename()vol=nibabel.load('{bet2}'.format(bet2=bet2))
- vol_data=vol.get_data()
+ vol_data=vol.get_fdata()print(vol_data.shape)new_data=vol_data.copy()new_data[new_data==250]=0
@@ -87,7 +91,7 @@
Source code for cortex.anat
    assert sp.call(cmd, shell=True) == 0, "Error calling fsl-fast"
    wmfl = 'fast_pve_2'
-    arr = np.asarray(nibabel.load('{cache}/{wmseg}.nii.gz'.format(cache=cache, wmseg=wmfl)).get_data())
+    arr = np.asarray(nibabel.load('{cache}/{wmseg}.nii.gz'.format(cache=cache, wmseg=wmfl)).get_fdata())
    if arr.sum() == 0:
        from warnings import warn
        warn('"fsl-fast" with default settings failed. Trying no pve, no bias correction...')
@@ -99,27 +103,31 @@
[docs]
-def voxelize(outfile, subject, surf='wm', mp=True):
-    '''Voxelize the whitematter surface to generate the white matter mask'''
+
+
+[docs]
+def voxelize(outfile, subject, surf='wm', mp=True):
+    '''Voxelize the whitematter surface to generate the white matter mask'''
    import nibabel
    from . import polyutils
    nib = db.get_anat(subject, "raw")
    shape = nib.get_shape()
    vox = np.zeros(shape, dtype=bool)
    for pts, polys in db.get_surf(subject, surf, nudge=False):
-        xfm = Transform(np.linalg.inv(nib.get_affine()), nib)
+        xfm = Transform(np.linalg.inv(nib.affine), nib)
        vox += polyutils.voxelize(xfm(pts), polys, shape=shape, center=(0, 0, 0), mp=mp).astype('bool')
-    nib = nibabel.Nifti1Image(vox, nib.get_affine(), header=nib.get_header())
+    nib = nibabel.Nifti1Image(vox, nib.affine, header=nib.header)
    nib.to_filename(outfile)
    return vox.T
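The hunks above migrate from nibabel's removed accessor methods to attributes (`get_affine()` to `.affine`, `get_header()` to `.header`, `get_data()` to `get_fdata()`). A small stand-alone sketch of the modern pattern, with an illustrative file path:

    import numpy as np
    import nibabel

    img = nibabel.load("/path/to/volume.nii.gz")   # illustrative path
    data = img.get_fdata()                          # float array; replaces get_data()
    mask = nibabel.Nifti1Image((data > 0).astype(np.uint8), img.affine, header=img.header)
    mask.to_filename("/tmp/volume_mask.nii.gz")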
"""Contains a singleton object `db` of type `Database` which allows easy access to surface files, anatomical images, and transforms that are stored in the pycortex filestore."""
-import os
-import re
 import copy
+import functools
 import glob
 import json
+import os
+import re
 import shutil
-import warnings
 import tempfile
-import functools
-import numpy as np
-from hashlib import sha1
+import warnings
 from builtins import input
+from hashlib import sha1
+
+import numpy as np
 from . import options
@@ -141,7 +141,8 @@
[docs]
-    def reload_subjects(self):
-        """Force the reload of the subject dictionary."""
+
+[docs]
+    def reload_subjects(self):
+        """Force the reload of the subject dictionary."""
        self._subjects = None
        self.subjects
-
[docs]
-    def get_anat(self, subject, type='raw', xfmname=None, recache=False, order=1, **kwargs):
-        """Return anatomical information from the filestore. Anatomical information is defined as
+
+
+[docs]
+    def get_anat(self, subject, type='raw', xfmname=None, recache=False, order=1, **kwargs):
+        """Return anatomical information from the filestore. Anatomical information is defined as
@@ -259,10 +271,13 @@
[docs]
-    def get_surfinfo(self, subject, type="curvature", recache=False, **kwargs):
-        """Return auxillary surface information from the filestore. Surface info is defined as
+
+
+[docs]
+    def get_surfinfo(self, subject, type="curvature", recache=False, **kwargs):
+        """Return auxiliary surface information from the filestore. Surface info is defined as
@@ -312,11 +327,14 @@
+[docs]
+    def get_mri_surf2surf_matrix(self, subject, surface_type, hemi='both', fs_subj=None,
+                                 target_subj='fsaverage', **kwargs):
-        """Get matrix generated by surf2surf to map one subject's surface to another's
+        """Get matrix generated by surf2surf to map one subject's surface to another's
@@ -355,8 +373,12 @@
+[docs]
+    def get_overlay(self, subject, overlay_file=None, **kwargs):
        from . import svgoverlay
        pts, polys = self.get_surf(subject, "flat", merge=True, nudge=True)
        paths = self.get_paths(subject)
-        # NOTE: This try loop is broken, in that it does nothing for the inteded
+        # NOTE: This try loop is broken, in that it does nothing for the intended
        # use case (loading an overlay from a packed subject) - needs fixing.
        # This hasn't come up yet due to very rare use of packed subjects.
        if self.auxfile is not None:
@@ -403,9 +428,12 @@
+[docs]
+    def save_xfm(self, subject, name, xfm, xfmtype="magnet", reference=None):
+        """
        Load a transform into the surface database. If the transform exists already, update it
        If it does not exist, copy the reference epi into the filestore and insert.
@@ -440,12 +468,12 @@
Source code for cortex.database
            raise ValueError("Please specify a reference")
        fpath = os.path.join(path, "reference.nii.gz")
        nib = nibabel.load(reference)
-        data = nib.get_data()
+        data = nib.get_fdata()
        if len(data.shape) > 3:
            import warnings
            warnings.warn('You are importing a 4D dataset, automatically selecting the first volume as reference')
            data = data[..., 0]
-        out = nibabel.Nifti1Image(data, nib.get_affine(), header=nib.get_header())
+        out = nibabel.Nifti1Image(data, nib.affine, header=nib.header)
        nibabel.save(out, fpath)
        jsdict = dict()
@@ -453,10 +481,10 @@
+[docs]
+    @_memo
    def get_surf(self, subject, type, hemisphere="both", merge=False, nudge=False):
-        '''Return the surface pair for the given subject, surface type, and hemisphere.
+        '''Return the surface pair for the given subject, surface type, and hemisphere.
@@ -558,7 +592,10 @@
        xfm = self.get_xfm(subject, xfmname)
        if xfm.shape != mask.shape:
            raise ValueError("Invalid mask shape: must match shape of reference image")
-        affine = xfm.reference.get_affine()
+        affine = xfm.reference.affine
        nib = nibabel.Nifti1Image(mask.astype(np.uint8).T, affine)
        nib.to_filename(fname)
[docs]
-    def get_shared_voxels(self, subject, xfmname, hemi="both", merge=True, use_astar=True, recache=False):
-        """Get an array indicating which vertices are inappropriately mapped to the same voxel.
+
+
+[docs]
+    def get_shared_voxels(self, subject, xfmname, hemi="both", merge=True, use_astar=True, recache=False):
+        """Get an array indicating which vertices are inappropriately mapped to the same voxel.
@@ -618,8 +661,11 @@
        voxels = np.load(shared_voxel_file)
        return voxels
-
[docs]
-    def get_coords(self, subject, xfmname, hemisphere="both", magnet=None):
-        """Calculate the coordinates of each vertex in the epi space by transforming the fiducial to the coordinate space
+
+
+[docs]
+    def get_coords(self, subject, xfmname, hemisphere="both", magnet=None):
+        """Calculate the coordinates of each vertex in the epi space by transforming the fiducial to the coordinate space
@@ -649,7 +695,10 @@
+[docs]
+    def get_cache(self, subject):
        try:
            self.auxfile.get_surf(subject, "fiducial")
            # generate the hashed name of the filename and subject as the directory name
@@ -669,8 +718,11 @@
        os.makedirs(cachedir)
        return cachedir
-
[docs]
-    def clear_cache(self, subject, clear_all_caches=True):
-        """Clears config-specified and default file caches for a subject.
+
+
+[docs]
+    def clear_cache(self, subject, clear_all_caches=True):
+        """Clears config-specified and default file caches for a subject.
        """
        local_cachedir = self.get_cache(subject)
@@ -690,8 +742,11 @@
[docs]
-    def get_paths(self, subject):
-        """Get a dictionary with a list of all candidate filenames for associated data, such as roi overlays, flatmap caches, and ctm caches.
+
+
+[docs]
+    def get_paths(self, subject):
+        """Get a dictionary with a list of all candidate filenames for associated data, such as roi overlays, flatmap caches, and ctm caches.
        """
        surfpath = os.path.join(self.filestore, subject, "surfaces")
@@ -723,12 +778,15 @@
+[docs]
+    def make_subj(self, subject):
        if os.path.exists(os.path.join(self.filestore, subject)):
            if input("Are you sure you want to overwrite this existing subject?\n"
                     "This will delete all files for this subject in the filestore, "
@@ -743,9 +801,12 @@
            os.makedirs(path)
        except OSError:
            print("Error making directory %s" % path)
+
-
[docs]
-    def save_view(self, vw, subject, name, is_overwrite=False):
-        """Set the view for an open webshow instance from a saved view
+
+[docs]
+    def save_view(self, vw, subject, name, is_overwrite=False):
+        """Set the view for an open webshow instance from a saved view
@@ -772,8 +833,11 @@
        with open(sName, 'w') as fp:
            json.dump(view, fp)
-
[docs]
-    def get_view(self, vw, subject, name):
-        """Set the view for an open webshow instance from a saved view
+
+
+[docs]
+    def get_view(self, vw, subject, name):
+        """Set the view for an open webshow instance from a saved view

        Sets the view in a currently-open cortex.webshow instance (with handle
        `vw`) to the saved view named `name`
@@ -796,8 +860,11 @@
        view = json.load(fp)
        vw._set_view(**view)
-
[docs]
-    def get_mnixfm(self, subject, xfm, template=None):
-        """Get transform from the space specified by `xfm` to MNI space.
+
+
+[docs]
+    def get_mnixfm(self, subject, xfm, template=None):
+        """Get transform from the space specified by `xfm` to MNI space.
@@ -845,7 +912,9 @@
        # Cache the result
        mni._save_fsl_xfm(mnixfmfile, mnixfm)
-        return mnixfm
+[docs]
+class Dataset(object):
+    """ Wrapper for multiple data objects. This often does not need to be used
    explicitly--for example, if a dictionary of data objects is passed to `cortex.webshow`,
    it will automatically be converted into a `Dataset`. All kwargs should be `BrainData`
    or `Dataset` objects.
    """
-
[docs]
-    def append(self, **kwargs):
-        """Add the `BrainData` or `Dataset` objects in `kwargs` into this
+
+
+[docs]
+    def append(self, **kwargs):
+        """Add the `BrainData` or `Dataset` objects in `kwargs` into this
        dataset.
        """
        for name, data in kwargs.items():
@@ -76,6 +82,7 @@
+[docs]
+    @classmethod
+    def from_file(cls, filename, subject=None):
+"""Load a pycortex Dataset (cortex.Dataset class) from a file
+
+ Parameters
+ ----------
+ filename : str
+ .hdf file from which to load
+ subject : str, optional
+ string ID for pycortex subject, if subject name has changed
+ since the Dataset was created. `None` input assumes subject
+ name in saved file is a subject in your current pycortex
+ filestore. By default None
+
+ Returns
+ -------
+ Dataset
+ pycortex Dataset
+ """ds=cls()ds.h5=h5py.File(filename,'r')
@@ -113,14 +139,17 @@
Source code for cortex.dataset.dataset
            if name in ("data", "subjects", "views"):
                continue
            try:
-                ds.views[name] = _from_hdf_data(ds.h5, name)
+                ds.views[name] = _from_hdf_data(ds.h5, name, subject=subject)
            except KeyError:
                print('No metadata found for "%s", skipping...' % name)
        # load up the views generated by pycortex
        for name, node in ds.h5['views'].items():
            try:
-                ds.views[name] = Dataview.from_hdf(node)
+                ds.views[name] = Dataview.from_hdf(node, subject=subject)
+            except FileNotFoundError:
+                print("Could not load file; old subject name? Try using `subject` kwarg to specify a current pycortex subject")
+                raise
            except Exception:
                import traceback
                traceback.print_exc()
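The new `subject` keyword shown above lets a saved dataset be re-attached to a renamed filestore subject instead of failing with `FileNotFoundError`. A minimal sketch, with illustrative file and subject names:

    import cortex

    # Load a saved Dataset and remap its views onto the current subject name.
    ds = cortex.Dataset.from_file("analysis.hdf", subject="S1")
    print(list(ds.views.keys()))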
@@ -128,9 +157,12 @@
        db.auxfile = None
        return ds
+
-
[docs]
-    def uniques(self, collapse=False):
-        """Return the set of unique BrainData objects contained by this dataset"""
+
+[docs]
+    def uniques(self, collapse=False):
+        """Return the set of unique BrainData objects contained by this dataset"""
        uniques = set()
        for name, view in self:
            for sv in view.uniques(collapse=collapse):
@@ -138,7 +170,10 @@
+[docs]
+    def get_xfm(self, subject, xfmname):
        try:
            group = self.h5['subjects'][subject]['transforms'][xfmname]
            return Transform(group['xfm'][:], tuple(group['xfm'].attrs['shape']))
        except (KeyError, TypeError):
            raise IOError('Transform not found in package')
+[docs]
+    def get_mask(self, subject, xfmname, maskname):
        try:
            group = self.h5['subjects'][subject]['transforms'][xfmname]['masks']
            return group[maskname]
        except (KeyError, TypeError):
            raise IOError('Mask not found in package')
[docs]
-    def prepend(self, prefix):
-        """Adds the given `prefix` to the name of every data object and returns
+
+
+[docs]
+    def prepend(self, prefix):
+        """Adds the given `prefix` to the name of every data object and returns
        a new Dataset.
        """
        ds = dict()
        for name, data in self:
            ds[prefix + name] = data
-        return Dataset(**ds)
default_cmap2D = options.config.get("basic", "default_cmap2D")

class Dataview2D(Dataview):
-    """Abstract base class for 2-dimensional data views.
+    """Abstract base class for 2-dimensional data views.
    """
    def __init__(self, description="", cmap=None, vmin=None, vmax=None, vmin2=None,
                 vmax2=None, state=None, **kwargs):
        self.cmap = cmap or default_cmap2D
@@ -99,6 +100,7 @@
+[docs]
+class Volume2D(Dataview2D):
+    """ Contains two 3D volumes for simultaneous visualization. Includes information
    on how the volumes should be jointly colormapped.
@@ -174,7 +178,9 @@
+
    def __repr__(self):
        return "<2D volumetric data for (%s, %s)>" % (self.dim1.subject, self.dim1.xfmname)
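`Volume2D` (and `Vertex2D` below) pair two datasets under a joint 2D colormap. A minimal sketch using random data, assuming the example subject "S1" with the "fullhead" transform is installed:

    import numpy as np
    import cortex

    shape = (31, 100, 100)                      # shape of the example transform
    vol2d = cortex.Volume2D(np.random.randn(*shape), np.random.randn(*shape),
                            "S1", "fullhead", cmap="PU_BuOr_covar")
    cortex.quickshow(vol2d, with_colorbar=False)  # 2D colorbars are not drawn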
@@ -205,7 +212,7 @@
Source code for cortex.dataset.view2D
    @property
    def raw(self):
-        """VolumeRGB object containing the colormapped data from this object.
+        """VolumeRGB object containing the colormapped data from this object.
        """
        if self.dim1.xfmname != self.dim2.xfmname:
            raise ValueError("Both Volumes must have same xfmname to generate single raw volume")
@@ -228,8 +235,11 @@
+[docs]
+class Vertex2D(Dataview2D):
+    """ Contains two vertex maps for simultaneous visualization. Includes information
    on how the maps should be jointly colormapped.
@@ -264,7 +274,9 @@
+
    def __repr__(self):
        return "<2D vertex data for (%s)>" % self.dim1.subject

    @property
    def raw(self):
-        """VertexRGB object containing the colormapped data from this object.
+        """VertexRGB object containing the colormapped data from this object.
        """
        r, g, b, a = self._to_raw(self.dim1.data, self.dim2.data)
        # Allow manual override of alpha channel
@@ -303,6 +316,21 @@
class DataviewRGB(Dataview):
-    """Abstract base class for RGB data views.
+    """Abstract base class for RGB data views.
    """
    def __init__(self, subject=None, alpha=None, description="", state=None, **kwargs):
        self.alpha = alpha
@@ -155,8 +155,14 @@
+[docs]
+class VolumeRGB(DataviewRGB):
+    """ Contains RGB (or RGBA) colors for each voxel in a volumetric dataset.
    Includes information about the subject and transform for the data.
@@ -211,7 +217,7 @@
Source code for cortex.dataset.viewRGB
Use the same vmin and vmax for all three color channels? shared_vmin : float, optional Predetermined shared vmin. Does nothing if shared_range == False. If not given,
- will be the 1st percentil of all values across all three channels.
+ will be the 1st percentile of all values across all three channels. shared_vmax : float, optional Predetermined shared vmax. Does nothing if shared_range == False. If not given, will be the 99th percentile of all values across all three channels
@@ -222,7 +228,9 @@
+
    @property
    def volume(self):
-        """5-dimensional volume (t, z, y, x, rgba) with data that has been mapped
+        """5-dimensional volume (t, z, y, x, rgba) with data that has been mapped
        into 8-bit unsigned integers that correspond to colors.
        """
        volume = []
@@ -340,11 +377,13 @@
+[docs]
+    @staticmethod
    def color_voxels(channel1, channel2, channel3, channel1color, channel2color,
                     channel3Color, value_max, saturation_max, common_range,
-                     common_min, common_max):
-        """
+                     common_min, common_max, alpha=None):
+        """
@@ -373,25 +412,41 @@
common_max : float, optional Predetermined shared vmax. Does nothing if shared_range == False. If not given, will be the 99th percentile of all values across all three channels
+ alpha : ndarray or Volume, optional
+ Alpha values for each voxel. If None, alpha is set to 1 for all voxels. Returns ------- red : ndarray of channel1.shape uint8 array of red values
- green : ndarray of data2.shape
+ green : ndarray of channel1.shape uint8 array of green values
- blue : ndarray of data3.shape
+ blue : ndarray of channel1.shape uint8 array of blue values
-
+ alpha : ndarray
+ If alpha=None, uint8 array of alpha values with alpha=1 for every voxel.
+ Otherwise, the same alpha values that were passed in. Additionally,
+            voxels with NaNs will have an alpha value of 0.
        """
        # normalize each channel to [0, 1]
-        data1 = np.nan_to_num(channel1.data if isinstance(channel1, VolumeData) else channel1).astype(np.float)
-        data2 = np.nan_to_num(channel2.data if isinstance(channel2, VolumeData) else channel2).astype(np.float)
-        data3 = np.nan_to_num(channel3.data if isinstance(channel3, VolumeData) else channel3).astype(np.float)
+        data1 = channel1.data if isinstance(channel1, VolumeData) else channel1
+        data1 = data1.astype(float)
+        data2 = channel2.data if isinstance(channel2, VolumeData) else channel2
+        data2 = data2.astype(float)
+        data3 = channel3.data if isinstance(channel3, VolumeData) else channel3
+        data3 = data3.astype(float)

        if (data1.shape != data2.shape) or (data2.shape != data3.shape):
            raise ValueError('Volumes are of different shapes')
+ # Create an alpha mask now, before casting nans to 0
+ # Voxels with at least one channel equal to NaN will be masked out.
+ mask=np.isnan(np.array([data1,data2,data3])).any(axis=0)
+ # Now convert to NaNs to num for all channels
+ data1=np.nan_to_num(data1)
+ data2=np.nan_to_num(data2)
+ data3=np.nan_to_num(data3)
+
ifcommon_range:ifcommon_minisNone:ifcommon_maxisNone:
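(Aside: the masking-and-normalization pattern above can be summarized with the following sketch; it assumes plain ndarrays and is not the method's exact code.)

import numpy as np

def normalize_channels(data1, data2, data3):
    # Mask voxels where any channel is NaN, then replace NaNs with zeros.
    mask = np.isnan(np.array([data1, data2, data3])).any(axis=0)
    data1, data2, data3 = (np.nan_to_num(d) for d in (data1, data2, data3))
    # Scale each channel independently to [0, 1].
    scaled = []
    for d in (data1, data2, data3):
        dmin, dmax = d.min(), d.max()
        scaled.append((d - dmin) / (dmax - dmin) if dmax > dmin else np.zeros_like(d))
    return scaled, mask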
@@ -452,11 +507,20 @@
+[docs]
+class VertexRGB(DataviewRGB):
+    """Contains RGB (or RGBA) colors for each vertex in a surface dataset.
+    Includes information about the subject.
@@ -495,7 +559,9 @@
    else:
        raise TypeError("Invalid input for Dataview")

-def _from_hdf_data(h5, name, xfmname=None, **kwargs):
-    """Decodes a __hash named node from an HDF file into the
+def _from_hdf_data(h5, name, xfmname=None, subject=None, **kwargs):
+    """Decodes a __hash named node from an HDF file into the
+    constituent Vertex or Volume object"""
+    dnode = h5.get("/data/%s" % name)
+    if dnode is None:
+        dnode = h5.get(name)
+
+    attrs = {k: u(v) for (k, v) in dnode.attrs.items()}
-    subj = attrs['subject']
+    if subject is None:
+        subject = attrs['subject']
+    # support old style xfmname saving as attribute
+    if xfmname is None and 'xfmname' in attrs:
+        xfmname = attrs['xfmname']
+
+    mask = None
+    if 'mask' in attrs:
+        if attrs['mask'].startswith("__"):
-            mask = h5['/subjects/%s/transforms/%s/masks/%s'%(attrs['subject'], xfmname, attrs['mask'])].value
+            mask = h5['/subjects/%s/transforms/%s/masks/%s' %
+                      (attrs['subject'], xfmname, attrs['mask'])].value
+        else:
+            mask = attrs['mask']
@@ -87,37 +89,37 @@
+[docs]
+class Volume(VolumeData, Dataview):
+    """Encapsulates a 3D volume or 4D volumetric movie. Includes information
+    on how the volume should be colormapped for display purposes.
@@ -303,7 +316,9 @@
Source code for cortex.dataset.views
    All additional arguments in kwargs are passed to the VolumeData and Dataview
    """
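(Aside: a typical way to construct such a view, assuming a subject "S1" with a transform "fullhead" already exists in the pycortex database; this is a usage sketch, not part of the module.)

import numpy as np
import cortex

# One value per voxel of the transform's reference space (shape of the S1/fullhead demo transform).
data = np.random.randn(31, 100, 100)
vol = cortex.Volume(data, "S1", "fullhead", cmap="viridis", vmin=-2, vmax=2)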
-
+[docs]
+class Vertex(VertexData, Dataview):
+    """Encapsulates a 1D vertex map or 2D vertex movie. Includes information
+    on how the data should be colormapped for display purposes.
@@ -357,7 +376,9 @@
    All additional arguments in kwargs are passed to the VolumeData and Dataview
    """
-
+[docs]
+    def map(self, target_subj, surface_type='fiducial', hemi='both', fs_subj=None, **kwargs):
- """Map this data from this surface to another surface
+"""Map this data from this surface to another surface Calls `cortex.freesurfer.vertex_to_vertex()` with this vertex object as the first argument.
@@ -414,7 +438,9 @@
[docs]def get_paths(fs_subject, hemi, type="patch", freesurfer_subject_dir=None):
-    """Retrive paths for all surfaces for a subject processed by freesurfer
+
+[docs]
+def get_paths(fs_subject, hemi, type="patch", freesurfer_subject_dir=None):
+    """Retrieve paths for all surfaces for a subject processed by freesurfer
+
+    Parameters
+    ----------
@@ -90,8 +90,11 @@
[docs]def autorecon(fs_subject, type="all", parallel=True, n_cores=None):
-    """Run Freesurfer's autorecon-all command for a given freesurfer subject
+
+
+[docs]
+def autorecon(fs_subject, type="all", parallel=True, n_cores=None):
+    """Run Freesurfer's autorecon-all command for a given freesurfer subject
+
+    Parameters
+    ----------
@@ -132,8 +135,11 @@
Source code for cortex.freesurfer
sp.check_call(shlex.split(cmd))
-
[docs]def flatten(fs_subject, hemi, patch, freesurfer_subject_dir=None, save_every=None):
-    """Perform flattening of a brain using freesurfer
+
+
+[docs]
+def flatten(fs_subject, hemi, patch, freesurfer_subject_dir=None, save_every=None):
+    """Perform flattening of a brain using freesurfer
+
+    Parameters
+    ----------
@@ -180,8 +186,16 @@
    return False
-
[docs]def import_subj(fs_subject, cx_subject=None, freesurfer_subject_dir=None, whitematter_surf='smoothwm'):
- """Imports a subject from freesurfer
+
+
+[docs]
+def import_subj(
+ freesurfer_subject,
+ pycortex_subject=None,
+ freesurfer_subject_dir=None,
+ whitematter_surf="smoothwm",
+):
+"""Imports a subject from freesurfer This will overwrite (after giving a warning and an option to continue) the pre-existing subject, including all blender cuts, masks, transforms, etc., and
@@ -191,67 +205,119 @@
    Parameters
    ----------
-    fs_subject : string
+    freesurfer_subject : str
+        Freesurfer subject name
- cx_subject : string, optional
- Pycortex subject name (These variable names should be changed). By default uses
- the same name as the freesurfer subject. Best to stick to that convention, if
- possible (your life will go more smoothly.) This optional kwarg is for edge cases.
- freesurfer_subject_dir : string, optional
+ pycortex_subject : str, optional
+ Pycortex subject name. By default it uses the freesurfer subject name.
+ It is advised to stick to that convention, if possible
+ (your life will go more smoothly.)
+    freesurfer_subject_dir : str, optional
+        Freesurfer subject directory to pull data from. By default uses the
+        directory given by the environment variable $SUBJECTS_DIR.
- whitematter_surf : string, optional
- Which whitematter surface to import as 'wm'. By default uses 'smoothwm', but that
- surface is smoothed and may not be appropriate. A good alternative is 'white'.
- """
-    if cx_subject is None:
-        cx_subject = fs_subject
- # Create and/or replace extant subject. Throws a warning that this will happen.
- database.db.make_subj(cx_subject)
+ whitematter_surf : str, optional
+ Which whitematter surface to import as 'wm'. By default uses 'smoothwm', but
+ that surface is smoothed and may not be appropriate.
+ A good alternative is 'white'.
+
+ Notes
+ -----
+ This function uses command line functions from freesurfer, so you should make sure
+ to have freesurfer sourced before running this function.
-    import nibabel
-    surfs = os.path.join(database.default_filestore, cx_subject, "surfaces", "{name}_{hemi}.gii")
-    anats = os.path.join(database.default_filestore, cx_subject, "anatomicals", "{name}.nii.gz")
-    surfinfo = os.path.join(database.default_filestore, cx_subject, "surface-info", "{name}.npz")
+ This function will also generate the fiducial surfaces for the subject, which are
+ halfway between the white matter and pial surfaces. The surfaces will be stored
+ in the freesurfer subject's directory. These fiducial surfaces are used for
+ cutting and flattening.
+ """
+    # Check if freesurfer is sourced or if subjects dir is passed
    if freesurfer_subject_dir is None:
-        freesurfer_subject_dir = os.environ['SUBJECTS_DIR']
-    fspath = os.path.join(freesurfer_subject_dir, fs_subject, 'mri')
-    curvs = os.path.join(freesurfer_subject_dir, fs_subject, 'surf', '{hemi}.{name}')
-
-    # import anatomicals
-    for fsname, name in dict(T1="raw", aseg="aseg", wm="raw_wm").items():
-        path = os.path.join(fspath, "{fsname}.mgz").format(fsname=fsname)
-        out = anats.format(subj=cx_subject, name=name)
-        cmd = "mri_convert {path} {out}".format(path=path, out=out)
+ if"SUBJECTS_DIR"inos.environ:
+ freesurfer_subject_dir=os.environ["SUBJECTS_DIR"]
+ else:
+ raiseValueError(
+ "Please source freesurfer before running this function, "
+ "or pass a path to the freesurfer subjects directory in "
+ "`freesurfer_subject_dir`"
+ )
+    fs_mri_path = os.path.join(freesurfer_subject_dir, freesurfer_subject, "mri")
+    fs_surf_path = os.path.join(freesurfer_subject_dir, freesurfer_subject, "surf")
+    fs_anat_template = os.path.join(fs_mri_path, "{name}.mgz")
+    fs_surf_template = os.path.join(fs_surf_path, "{hemi}.{name}")
+
+ # Now deal with pycortex
+    if pycortex_subject is None:
+        pycortex_subject = freesurfer_subject
+    # Create and/or replace extant subject. Throws a warning that this will happen.
+    database.db.make_subj(pycortex_subject)
+
+    filestore = os.path.join(database.default_filestore, pycortex_subject)
+    anat_template = os.path.join(filestore, "anatomicals", "{name}.nii.gz")
+    surf_template = os.path.join(filestore, "surfaces", "{name}_{hemi}.gii")
+    surfinfo_template = os.path.join(filestore, "surface-info", "{name}.npz")
+
+ # Dictionary mapping for volumes to be imported over from freesurfer
+ volumes_fs2pycortex={"T1":"raw","aseg":"aseg","wm":"raw_wm"}
+ # Import volumes
+ forfsname,nameinvolumes_fs2pycortex.items():
+ in_volume=fs_anat_template.format(name=fsname)
+ out_volume=anat_template.format(name=name)
+ cmd="mri_convert {path}{out}".format(path=in_volume,out=out_volume)sp.check_output(shlex.split(cmd))# (Re-)Make the fiducial files# NOTE: these are IN THE FREESURFER $SUBJECTS_DIR !! which can cause confusion.
- make_fiducial(fs_subject,freesurfer_subject_dir=freesurfer_subject_dir)
-
- # Freesurfer uses FOV/2 for center, let's set the surfaces to use the
- # magnet isocenter
-    trans = nibabel.load(out).get_affine()[:3, -1]
-    surfmove = trans - np.sign(trans) * [128, 128, 128]
-
-    from . import formats
-    for fsname, name in [(whitematter_surf, "wm"), ('pial', "pia"), ('inflated', "inflated")]:
+    make_fiducial(freesurfer_subject, freesurfer_subject_dir=freesurfer_subject_dir)
+
+ # Dictionary mapping for surfaces to be imported over from freesurfer
+    surfaces_fs2pycortex = {
+        whitematter_surf: "wm",
+        "pial": "pia",
+        "inflated": "inflated",
+    }
+    # Import surfaces
+    for fsname, name in surfaces_fs2pycortex.items():
        for hemi in ("lh", "rh"):
-            pts, polys, _ = get_surf(fs_subject, hemi, fsname, freesurfer_subject_dir=freesurfer_subject_dir)
-            fname = str(surfs.format(subj=cx_subject, name=name, hemi=hemi))
-            formats.write_gii(fname, pts=pts + surfmove, polys=polys)
-
-    for curv, info in dict(sulc="sulcaldepth", thickness="thickness", curv="curvature").items():
-        lh, rh = [parse_curv(curvs.format(hemi=hemi, name=curv)) for hemi in ['lh', 'rh']]
-        np.savez(surfinfo.format(subj=cx_subject, name=info), left=-lh, right=-rh)
-
+            in_surface = fs_surf_template.format(hemi=hemi, name=fsname)
+            out_surface = surf_template.format(name=name, hemi=hemi)
+ # Use the --to-scanner flag to store the surfaces with the same coordinate
+ # system as the volume data, rather than the TKR coordinate system, which
+ # has the center set to FOV/2.
+ # NOTE: the resulting gifti surfaces will look misaligned with respect to
+ # the anatomical volumes when visualized in freeview, because freeview
+ # expects the surfaces to be in TKR coordinates (with center set to FOV/2).
+ # But the surfaces stored in the pycortex database are only to be used by
+ # pycortex, so that's fine.
+ cmd=f"mris_convert --to-scanner {in_surface}{out_surface}"
+ sp.check_output(shlex.split(cmd))
+
+ # Dictionary mapping for curvature and extra info to be imported
+    info_fs2pycortex = {
+        "sulc": "sulcaldepth",
+        "thickness": "thickness",
+        "curv": "curvature",
+    }
+    # Import curvature and extra information
+    for fsname, name in info_fs2pycortex.items():
+        in_info_lhrh = [
+            fs_surf_template.format(hemi=hemi, name=fsname) for hemi in ["lh", "rh"]
+        ]
+        lh, rh = [parse_curv(in_info) for in_info in in_info_lhrh]
+        np.savez(
+            surfinfo_template.format(name=name),
+            left=-lh,
+            right=-rh
+        )
+
+    # Finally update the database by re-initializing it
    database.db = database.Database()
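(Aside: a hedged usage sketch for the rewritten importer; it assumes recon-all has finished for the subject and that either $SUBJECTS_DIR is set or a subjects directory is passed explicitly. The path below is a placeholder.)

from cortex import freesurfer

# Import the FreeSurfer subject "sub-01" into the pycortex database under the same name.
freesurfer.import_subj(
    "sub-01",
    freesurfer_subject_dir="/path/to/freesurfer/subjects",  # or rely on $SUBJECTS_DIR
)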
+[docs]
+def import_flat(fs_subject, patch, hemis=['lh', 'rh'], cx_subject=None,
+                flat_type='freesurfer', auto_overwrite=False,
+                freesurfer_subject_dir=None, clean=True):
- """Imports a flat brain from freesurfer
+"""Imports a flat brain from freesurfer NOTE: This will delete the overlays.svg file for this subject, since THE FLATMAPS WILL CHANGE, as well as all cached information (e.g. old flatmap
@@ -268,7 +334,7 @@
    cx_subject : str
        Pycortex subject name
    freesurfer_subject_dir : str
-        directory for freesurfer subjects. None defaults to evironment variable
+        directory for freesurfer subjects. None defaults to environment variable
+        $SUBJECTS_DIR
+    clean : bool
+        If True, the flat surface is cleaned to remove the disconnected polys.
@@ -318,11 +384,12 @@
+
# Regenerate it?
def _remove_disconnected_polys(polys):
- """Remove polygons that are not in the main connected component.
+"""Remove polygons that are not in the main connected component. This function creates a sparse graph based on edges in the input. Then it computes the connected components, and returns only the polygons
@@ -361,7 +428,7 @@
def _move_disconnect_points_to_zero(pts, polys):
-    """Change coordinates of points not in polygons to zero.
+    """Change coordinates of points not in polygons to zero.
+
+    This cleaning step is useful after _remove_disconnected_polys, to avoid
+    using these points in boundaries computations (through pts.max(axis=0)
@@ -373,8 +440,10 @@
    return pts
-
[docs]def make_fiducial(fs_subject, freesurfer_subject_dir=None):
- """Make fiducial surface (halfway between white matter and pial surfaces)
+
+[docs]
+def make_fiducial(fs_subject, freesurfer_subject_dir=None):
+    """Make fiducial surface (halfway between white matter and pial surfaces)
+    """
+    for hemi in ['lh', 'rh']:
+        spts, polys, _ = get_surf(fs_subject, hemi, "smoothwm", freesurfer_subject_dir=freesurfer_subject_dir)
@@ -383,8 +452,11 @@
def write_patch(filename, pts, edges=None):
- """Writes a patch file that is readable by freesurfer.
+"""Writes a patch file that is readable by freesurfer. Note this function is duplicated here and in blendlib. This function writes freesurfer format, so seems natural to place here, but it
@@ -442,16 +515,21 @@
+
def _move_labels(subject, label, hemisphere=('lh', 'rh'), fs_dir=None, src_subject='fsaverage'):
- """subject is a freesurfer subject"""
+"""subject is a freesurfer subject"""iffs_dirisNone:fs_dir=os.environ['SUBJECTS_DIR']forhemiinhemisphere:
@@ -532,7 +614,7 @@
def _parse_labels(label_files, cx_subject):
- """Extract values from freesurfer label file(s) and map to vertices
+"""Extract values from freesurfer label file(s) and map to vertices Parameters ----------
@@ -561,7 +643,7 @@
    return verts, values

def get_label(cx_subject, label, fs_subject=None, fs_dir=None, src_subject='fsaverage', hemisphere=('lh', 'rh'), **kwargs):
- """Get data from a label file for fsaverage subject
+"""Get data from a label file for fsaverage subject Parameters ----------
@@ -592,7 +674,7 @@
    return cmd

+def _check_datatype(data):
+    dtype = data.dtype
+    if dtype == np.int64:
+        return np.int32
+    elif dtype == np.float64:
+        return np.float32
+    else:
+        return dtype
+
def mri_surf2surf(data, source_subj, target_subj, hemi, subjects_dir=None):
- """Uses freesurfer mri_surf2surf to transfer vertex data between
+"""Uses freesurfer mri_surf2surf to transfer vertex data between two freesurfer subjects Parameters
@@ -633,7 +724,8 @@
    =====
    Requires path to mri_surf2surf or freesurfer environment to be active.
    """
-    data_arrays = [gifti.GiftiDataArray(d) for d in data]
+    datatype = _check_datatype(data)
+    data_arrays = [gifti.GiftiDataArray(d, datatype=datatype) for d in data]
+    gifti_image = gifti.GiftiImage(darrays=data_arrays)
+
+    tf_in = NamedTemporaryFile(suffix=".gii")
@@ -678,7 +770,7 @@
                        n_test_images=40, coef_threshold=None, renormalize=True):
-    """Creates a matrix implementing freesurfer mri_surf2surf command.
+    """Creates a matrix implementing freesurfer mri_surf2surf command.
+
+    A surface-to-surface transform is a linear transform between vertex spaces.
+    Such a transform must be highly localized in the sense that a vertex in the
@@ -796,8 +888,10 @@
    return matrix
-
[docs]def get_curv(fs_subject, hemi, type='wm', freesurfer_subject_dir=None):
-    """Load freesurfer curv file for a freesurfer subject
+
+[docs]
+def get_curv(fs_subject, hemi, type='wm', freesurfer_subject_dir=None):
+    """Load freesurfer curv file for a freesurfer subject
+
+    Parameters
+    ----------
@@ -819,8 +913,11 @@
    return parse_curv(curv_file)
-
[docs]def show_surf(subject, hemi, type, patch=None, curv=True, freesurfer_subject_dir=None):
-    """Show a surface from a Freesurfer subject directory
+
+
+[docs]
+def show_surf(subject, hemi, type, patch=None, curv=True, freesurfer_subject_dir=None):
+    """Show a surface from a Freesurfer subject directory
+
+    Parameters
+    ----------
@@ -877,8 +974,11 @@
+
+
+def upsample_to_fsaverage(
+    data, data_space="fsaverage6", freesurfer_subjects_dir=None
+):
+    """Project data from fsaverage6 (or other fsaverage surface) to fsaverage to
+    visualize it in pycortex.
+
+ Parameters
+ ----------
+ data : array (n_samples, n_vertices)
+ Data in space `space`. The first n_vertices/2 vertices correspond to the left
+ hemisphere, and the last n_vertices/2 vertices correspond to the right
+ hemisphere.
+ data_space : str
+ One of fsaverage[1-6], corresponding to the source template space of `data`.
+ freesurfer_subjects_dir : str or None
+ Path to Freesurfer subjects directory. If None, defaults to the value of the
+ environment variable $SUBJECTS_DIR.
+
+ Returns
+ -------
+ projected_data : array (n_samples, 327684)
+ Data projected to fsaverage(7).
+
+ Notes
+ -----
+ Data in the lower resolution fsaverage template is upsampled to the full resolution
+ fsaverage template by nearest-neighbor interpolation. To project the data from a
+ lower resolution version of fsaverage, this code exploits the structure of fsaverage
+ surfaces. (That is, each hemisphere in fsaverage6 corresponds to the first
+ 40,962 vertices of fsaverage; fsaverage5 corresponds to the first 10,242 vertices of
+ fsaverage, etc.)
+ """
+
+
+    def get_n_vertices_ico(icoorder):
+        return 4 ** icoorder * 10 + 2
+
+    ico_order = int(data_space[-1])
+    n_ico_vertices = get_n_vertices_ico(ico_order)
+    ndim = data.ndim
+    data = np.atleast_2d(data)
+    _, n_vertices = data.shape
+    if n_vertices != 2 * n_ico_vertices:
+        raise ValueError(
+            f"data has {n_vertices} vertices, but {2*n_ico_vertices} "
+            f"are expected for both hemispheres in {data_space}"
+        )
+
+    if freesurfer_subjects_dir is None:
+        freesurfer_subjects_dir = os.environ.get("SUBJECTS_DIR", None)
+        if freesurfer_subjects_dir is None:
+            raise ValueError(
+                "freesurfer_subjects_dir must be specified or $SUBJECTS_DIR must be set"
+            )
+
+    data_hemi = np.split(data, 2, axis=-1)
+    hemis = ["lh", "rh"]
+    projected_data = []
+    for i, (hemi, dt) in enumerate(zip(hemis, data_hemi)):
+        # Load fsaverage sphere for this hemisphere
+        pts, faces = nibabel.freesurfer.read_geometry(
+            os.path.join(
+                freesurfer_subjects_dir, "fsaverage", "surf", f"{hemi}.sphere.reg"
+            )
+        )
+        # build kdtree using only vertices in reduced fsaverage surface
+        kdtree = KDTree(pts[:n_ico_vertices])
+        # figure out neighbors in reduced version for all other vertices in fsaverage
+        _, neighbors = kdtree.query(pts[n_ico_vertices:], k=1)
+        # now simply fill remaining vertices with original values
+        projected_data.append(
+            np.concatenate([dt, dt[:, neighbors]], axis=-1)
+        )
+    projected_data = np.hstack(projected_data)
+    if ndim == 1:
+        projected_data = projected_data[0]
+    return projected_data
+
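(Aside: a usage sketch, assuming a standard FreeSurfer installation with the fsaverage subject available and $SUBJECTS_DIR set.)

import numpy as np

# fsaverage6 has 40962 vertices per hemisphere, so 81924 in total.
data_fs6 = np.random.randn(81924)
data_fs7 = upsample_to_fsaverage(data_fs6, data_space="fsaverage6")
print(data_fs7.shape)  # (327684,)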
+
# aseg partition labels (up to 256 only)
fs_aseg_dict = {'Unknown': 0,
                'Left-Cerebral-Exterior': 1,
@@ -1293,7 +1489,7 @@
        return dataset.Vertex(np.hstack(mapped).squeeze(), data.subject)

    def backwards(self, vertexdata):
- '''Projects vertex data back into volume space.
+        '''Projects vertex data back into volume space.
+
+        Parameters
+        ----------
@@ -167,6 +171,7 @@
+[docs]
+def compute_mni_transform(subject, xfm, template=default_template):
- """
+""" Compute transform from the space specified by `xfm` to MNI standard space. Parameters
@@ -110,9 +111,12 @@
+[docs]
+def transform_to_mni(volumedata, func_to_mni, template=default_template):
- """
+""" Transform data in `volumedata` to MNI space, resample at the resolution of the atlas image.
@@ -150,8 +154,11 @@
+[docs]
+def transform_surface_to_mni(subject, surfname):
+    """
+    Transform the surface named `surfname` for subject called `subject` into
+    MNI coordinates. Returns [(lpts, lpolys), (rpts, rpolys)].
@@ -168,7 +175,7 @@
Source code for cortex.mni
        MNI-transformed surface in same format returned by db.get_surf.
    """
    # Get MNI affine transform
-    mni_affine = nibabel.load(default_template).get_affine()
+    mni_affine = nibabel.load(default_template).affine
+    # Get subject anatomical-to-MNI transform
+    mni_xfm = np.dot(mni_affine, db.get_mnixfm(subject, "identity"))
@@ -188,9 +195,12 @@
+[docs]
+def transform_mni_to_subject(subject, xfm, volarray, func_to_mni, template=default_template):
-    """
+    """
+    Transform data in `volarray` from MNI space to functional space specified
+    by `xfm`.
+
+    Parameters
@@ -220,7 +230,7 @@
    funcspace_nii = tempfile.mktemp(".nii.gz")

    # Save out relevant things
-    affine = nibabel.load(template).get_affine()
+    affine = nibabel.load(template).affine
+    nibabel.save(nibabel.Nifti1Image(volarray, affine), mnispace_func_nii)
+    _save_fsl_xfm(mni_to_func_xfm, np.linalg.inv(func_to_mni))
@@ -234,6 +244,7 @@
[docs]class Distortion(object):
-    """Used to compute distortion metrics between fiducial and another (e.g. flat)
+
+[docs]
+class Distortion(object):
+    """Used to compute distortion metrics between fiducial and another (e.g. flat)
+    surface.
+
+    Parameters
@@ -52,14 +53,17 @@
Source code for cortex.polyutils.distortion
    polys : 2D ndarray, shape (total_polys, 3)
        Triangle vertex indices in both `flat` and `ref`.
    """
-
+
    @property
    def areal(self):
-        """Compute areal distortion of the flatmap.
+        """Compute areal distortion of the flatmap.
+
+        Areal distortion is calculated at each triangle as the log2 ratio of
+        the triangle area in the flatmap to the area in the reference surface.
@@ -98,7 +102,7 @@
    @property
    def metric(self):
-        """Compute metric distortion of the flatmap.
+        """Compute metric distortion of the flatmap.
+
+        Metric distortion is calculated as the difference in squared distance
+        from each vertex to its neighbors between the flatmap and the reference.
@@ -136,6 +140,7 @@
[docs]class Surface(exact_geodesic.ExactGeodesicMixin, subsurface.SubsurfaceMixin):
-    """Represents a single cortical hemisphere surface. Can be the white matter surface,
+
+[docs]
+class Surface(exact_geodesic.ExactGeodesicMixin, subsurface.SubsurfaceMixin):
+    """Represents a single cortical hemisphere surface. Can be the white matter surface,
+    pial surface, fiducial (mid-cortical) surface, inflated surface, flattened
+    surface, etc.
@@ -64,7 +65,9 @@
Source code for cortex.polyutils.surface
    polys : 2D ndarray, shape (total_polys, 3)
        Indices of the vertices in each triangle in the surface.
    """
-
+
    @property
    @_memo
    def ppts(self):
-        """3D matrix of points in each face: n faces x 3 points per face x 3 coords per point.
+        """3D matrix of points in each face: n faces x 3 points per face x 3 coords per point.
+        """
+        return self.pts[self.polys]
+
+    @property
+    @_memo
+    def connected(self):
- """Sparse matrix of vertex-face associations.
+"""Sparse matrix of vertex-face associations. """npt=len(self.pts)npoly=len(self.polys)
@@ -93,7 +97,7 @@
    @property
    @_memo
    def face_normals(self):
-        """Normal vector for each face.
+        """Normal vector for each face.
+        """
+        # Compute normal vector direction
+        fnorms = np.cross(self.ppts[:, 1] - self.ppts[:, 0],
@@ -122,7 +126,7 @@
    @property
    @_memo
    def vertex_normals(self):
-        """Normal vector for each vertex (average of normals for neighboring faces).
+        """Normal vector for each vertex (average of normals for neighboring faces).
+        """
+        # Average adjacent face normals
+        vnorms = np.nan_to_num(self.connected.dot(self.face_normals) / self.connected.sum(1)).A
@@ -132,7 +136,7 @@
    @property
    @_memo
    def face_areas(self):
-        """Area of each face.
+        """Area of each face.
+        """
+        # Compute normal vector (length is face area)
+        fnorms = np.cross(self.ppts[:, 1] - self.ppts[:, 0],
@@ -143,7 +147,7 @@
    @property
    @_memo
    def cotangent_weights(self):
-        """Cotangent of angle opposite each vertex in each face.
+        """Cotangent of angle opposite each vertex in each face.
+        """
+        ppts = self.ppts
+        cots1 = ((ppts[:, 1] - ppts[:, 0]) *
@@ -165,7 +169,7 @@