From b805f583fdb5e11f30577df28491ba7e6a3769a7 Mon Sep 17 00:00:00 2001
From: sebnaze
Date: Fri, 9 Oct 2020 23:39:13 -0400
Subject: [PATCH 1/2] prepared for pull request to ins-amu

---
 CH_pipeline_integrated.m             | 1224 ++++++++++++++++++++++++++
 README.md                            |   28 +-
 config_CH.sh                         |   21 +
 example_config.sh                    |   21 +-
 main_CH.sh                           |   21 +
 main_surface.sh                      |  210 ++++-
 util/check_region_mapping_white.py   |  165 ++++
 util/correct_region_mapping_white.py |   33 +
 util/extract_white_high.py           |   29 +
 util/fs_region.txt                   |   89 ++
 util/lh_ref_table.txt                |   46 +
 util/region_mapping_white.py         |  113 +++
 util/rh_ref_table.txt                |   46 +
 13 files changed, 1986 insertions(+), 60 deletions(-)
 create mode 100644 CH_pipeline_integrated.m
 create mode 100755 config_CH.sh
 create mode 100755 main_CH.sh
 create mode 100644 util/check_region_mapping_white.py
 create mode 100644 util/correct_region_mapping_white.py
 create mode 100644 util/extract_white_high.py
 create mode 100644 util/fs_region.txt
 create mode 100644 util/lh_ref_table.txt
 create mode 100644 util/region_mapping_white.py
 create mode 100644 util/rh_ref_table.txt

diff --git a/CH_pipeline_integrated.m b/CH_pipeline_integrated.m
new file mode 100644
index 0000000..f0248c7
--- /dev/null
+++ b/CH_pipeline_integrated.m
@@ -0,0 +1,1224 @@
+% Script to compute connectome harmonics
+% Processing steps:
+% 1) Import cortical surface mesh
+% 2) Import tracks
+% 3) Compute high-resolution connectome
+% 4) Compute harmonics
+% 5) Compute mutual information with the default mode network
+%
+% Author: Sebastien Naze
+%
+% Dependencies (none are included in the repo; those with a + sign must be added in matlab_utils/):
+% - Gibbs Connectome (https://www.nitrc.org/frs/?group_id=987)
+% - Freesurfer cortical surface templates (https://surfer.nmr.mgh.harvard.edu/fswiki/DownloadAndInstall)
+% + and surface readers
+% + Toolbox graph (https://www.mathworks.com/matlabcentral/fileexchange/5355-toolbox-graph)
+% + CBrewer colormaps (https://www.mathworks.com/matlabcentral/fileexchange/34087-cbrewer---colorbrewer-schemes-for-matlab)
+% + Export_fig (https://www.mathworks.com/matlabcentral/fileexchange/23629-export-fig)
+% + TriangleRayIntersection (https://www.mathworks.com/matlabcentral/fileexchange/33073-triangle-ray-intersection)
+% + SmoothPatch (https://www.mathworks.com/matlabcentral/fileexchange/26710-smooth-triangulated-mesh)
+% + Subtightplot (https://www.mathworks.com/matlabcentral/fileexchange/39664-subtightplot)
+
+% global parameters
+global FS PRD SUBJ_ID
+FS = getenv('SUBJECTS_DIR');
+PRD = getenv('PRD');
+SUBJ_ID = getenv('SUBJ_ID');
+nWorkers = str2num(getenv('NWORKERS'));
+SCRIPTS_DIR = getenv('SCRIPTS_DIR');
+
+% path to dependencies
+addpath(genpath(fullfile(SCRIPTS_DIR, 'matlab_utils')));
+
+% Flags for cortical surface template mesh to use (if using template)
+CVS_Avg35_inMNI152 = 0;
+fsaverage4 = 0;
+fsaverage5 = 0;
+fsaverage6 = 0;
+
+% Flag for subject-specific cortical surface
+indiv_surf = 1;
+
+% Flag for using pial or white matter surface (default: WM)
+pial = 0;
+
+% Flags for cortical surface mesh smoothing
+smoothIter = 0; % <-- keep at 0, not available in public release. Contact author.
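% A minimal sketch (an assumption, not something the pipeline above does): every
% path above is taken from the shell environment, so a variable that was never
% exported only fails much later with an obscure file-not-found error. A guard
% like the following, run right after the getenv calls, fails early instead.
% The variable names are the ones this script already reads.
required_env = {'SUBJECTS_DIR', 'PRD', 'SUBJ_ID', 'NWORKERS', 'SCRIPTS_DIR'};
for k = 1:numel(required_env)
    if isempty(getenv(required_env{k}))
        error('Environment variable %s must be set (see config_CH.sh).', required_env{k});
    end
end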
+
+% Flags for tractography
+Yeh2018_dsistudio = 0;
+GibbsConnectome = 0;
+indiv_tracks = 1;
+reload_tracks = 1;
+
+% Flags for connectome
+compute_connectome = 1;
+save_connectome = 1;
+plot_connectome = 0;
+plot_tckLengths = 0;
+compute_tck_stats = 0;
+plot_tck_stats = 0;
+plot_connectomeStats = 0; % <-- needs add-on, contact author
+
+% Parameters for connectome
+nsamples = 0; % 0: all tracks, otherwise nbr of tracks
+sz_bound = 3; % parameter k_s in paper
+ext_scaling = 2; % length of the track extension in k_s unit
+minTckLength = 10; % ~ 1mm per unit, depending on tractography algo
+
+% Flags and parameters for connectome harmonics
+compute_harmonics = 1;
+reload_connectome = 0;
+nHarmonics = 100; % Number of harmonics to compute
+threshold_adjacency = -99; % z_C in paper (use negative value << 0 to include all tracks)
+trim_adjacency = 0; % trimming
+rand_trim_type = 'Ascend';
+tckLen_trim_adjacency = 0;
+tckLen_trim_type = '';
+local_spread = 1; % nbr of neighboring vertices for local diffusion on mesh
+anisotropy = 0; % percentage of local connectivity trimming (max 1)
+  dist_based_local_trim = 0; % distance based anisotropy, set to 1 to perform
+  AniSorting = 'Descend'; % remove connections by ascending or descending order of length
+callosectomy = 0; % percentage of inter-hemispheric connections trimmed (max 1)
+  n_graph_chunks = 1; % number of subgraphs to perform the eigendecomposition on (set to 2 if disconnected hemispheres)
+  dist_based_callo_trim = 1; % distance-based callosectomy
+  CalloSorting = 'Ascend'; % remove connections by ascending or descending order of length
+separate_hemispheres = 0; % set to 1 if disconnected hemispheres so that both are shown when projected on cortical surface
+plot_CC_stats = 0;
+rand_graph = [0 0 0]; % Flag for randomization type [interHemis, intraHemis, global]
+randomize_graph_CC_only = rand_graph(1);
+randomize_graph_intraHemi_only = rand_graph(2);
+randomize_graph_global = rand_graph(3);
+rand_proba = 1; % for performing gradual randomization (default:1)
+iter_rand = 0; % for numbering surrogates
+sigma_eigs = 10^-9; % param of eigendec, check help eigs
+tol_eigs = 10^-9; % "
+plot_graph = 0; % set to 1 to plot graph illustration
+plot_harmonics = 0; % set to 1 to show harmonics projected one by one on cortical surface (needs plotNFsurf add-on, contact author)
+nPlottedHarmonics = 20; % nbr of harmonics to visualize
+
+% Flags and params for MI RSN (default: DMN)
+MI_RSN = 1;
+RSN='DMN';
+RSN_Yeo2011 = 0;
+reload_harmonics = 0;
+nBinsMI = [16]; % always keep 1 first (if v3 wanted) and ascending order
+thZscMI = 1;
+MI_ylim = [0 0.3];
+save_MI = 1;
+
+% Flags and params for MI DSK
+convert_into_DSK_atlas = 1; % /!\ (make sure to reload harmonics if you want to load combined)
+plot_converted_harmonics = 0;
+re_save_combined = 1;
+
+% Global flags for visualization and savings
+vis = 'off';
+plot_RSN_mask = 0; % <-- needs plotNFsurf add-on
+save_combined = 1;
+save_figs = 1;
+close_figs = 1;
+
+% miscellaneous parameters
+seed_rng = 'shuffle'; %12345; % replace by 'shuffle' to get seed based on current time
+rng(seed_rng);
+%nWorkers = 4; % nbr of processes for parallel computations (needs parallel processing toolbox) - commented because given as environment variable
+nz = ceil(nsamples/nWorkers/2);
+subj_type = 'HCP'; % subject type, only used for colormaps.
HCP: Blue-White-Red; HD: Purple-White-Green + +if randomize_graph_global + rand_type = strcat('_randGlobal', num2str(iter_rand)); +elseif randomize_graph_CC_only + rand_type = strcat('_randCC', num2str(iter_rand)); + if randomize_graph_intraHemi_only + rand_type = strcat('_randIntraAndCC', num2str(iter_rand)); + end +elseif randomize_graph_intraHemi_only + rand_type = strcat('_randIntra', num2str(iter_rand)); +else + if iter_rand>0 + rand_type = strcat('_rand', num2str(iter_rand)); + else + rand_type = ''; + end +end + +if rand_proba==1 + rnd_proba = ''; +else + rnd_proba = strcat('_rndP', num2str(rand_proba*100)); +end + +system(strcat("mkdir -p ", PRD, '/connectivity/img_', SUBJ_ID)); + +if trim_adjacency > 0 + trimAdj_prefix = strcat('_randTrim', rand_trim_type); + trimAdj_all = strcat(trimAdj_prefix, num2str(trim_adjacency*100)); +elseif tckLen_trim_adjacency > 0 + trimAdj_prefix = strcat('_tckLenTrim', tckLen_trim_type); + trimAdj_all = strcat(trimAdj_prefix, num2str(tckLen_trim_adjacency*100)); +else + trimAdj_prefix = ''; + trimAdj_all = ''; +end + +if RSN_Yeo2011 + rsn_type = strcat(RSN, '_Yeo2011_'); +else + rsn_type = strcat(RSN, '_rsn_'); +end + +if n_graph_chunks > 1 + callo_suffix = strcat('_nchunks', num2str(n_graph_chunks)); +else + callo_suffix = ''; +end + +if callosectomy == 1 + %separate_hemispheres = 1; +end + +if separate_hemispheres + sep_hemi='_sepHemiCombi'; +else + sep_hemi=''; +end + +if dist_based_local_trim + distAni = strcat('DistBased', AniSorting(1:3)); +else + distAni = ''; +end + +if dist_based_callo_trim + distCallo = CalloSorting(1:3); +else + distCallo = ''; +end + +%% IMPORT SURFACE %% +if CVS_Avg35_inMNI152 + SUBJ_ID = 'copy_cvs_avg35_inMNI152'; + PRD = strcat(FS, SUBJ_ID); + surf_type = 'cvsAvgInMNI152'; + + if pial + surf_type = strcat(surf_type, '_pial'); + vertices.left = load(strcat(PRD,'/surface/lh_vertices_low.txt')); + vertices.right = load(strcat(PRD,'/surface/rh_vertices_low.txt')); + + faces.left = load(strcat(PRD,'/surface/lh_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.right = load(strcat(PRD,'/surface/rh_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + + vertices.all = [vertices.left; vertices.right]; + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + + % considers smoothed surface only from white + elseif ~smoothIter + % read white matter surface + vertices.left = load(strcat(PRD,'/surface/lh_white_vertices_low.txt')); + vertices.right = load(strcat(PRD,'/surface/rh_white_vertices_low.txt')); + faces.left = load(strcat(PRD,'/surface/lh_white_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.right = load(strcat(PRD,'/surface/rh_white_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + vertices.all = [vertices.left; vertices.right]; + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_white_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_white_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + else + % import surface + surf_path = fullfile(PRD, 'surface', strcat('smoothSurf_iter', num2str(smoothIter))); + vertices.left = dlmread(strcat(surf_path, '_lh_vertices.txt'), ' 
'); + vertices.right = dlmread(strcat(surf_path, '_rh_vertices.txt'), ' '); + %vertices.all = [vertices.left; vertices.right]; + faces.left = dlmread(strcat(surf_path, '_lh_faces.txt'), ' '); + faces.right = dlmread(strcat(surf_path, '_rh_faces.txt'), ' '); + vertices.all = [vertices.left; vertices.right]; + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + end + +elseif (fsaverage4 || fsaverage5 || fsaverage6) + if fsaverage4 + SUBJ_ID ='copy_fsaverage4'; + surf_type = 'fsaverage4'; + disp('Importing fsaverage4 cortical surface...'); + elseif fsaverage5 + SUBJ_ID ='copy_fsaverage5'; + surf_type = 'fsaverage5'; + disp('Importing fsaverage5 cortical surface...'); + elseif fsaverage6 + SUBJ_ID ='copy_fsaverage6'; + surf_type = 'fsaverage6'; + disp('Importing fsaverage5 cortical surface...'); + end + PRD = strcat(FS, SUBJ_ID); + + if pial + [vertices.left, faces.left] = freesurfer_read_surf(strcat(PRD, '/surf/lh.pial')); + [vertices.right, faces.right] = freesurfer_read_surf(strcat(PRD, '/surf/rh.pial')); + surf_type = strcat(surf_type, '_pial'); + else + [vertices.left, faces.left] = freesurfer_read_surf(strcat(PRD, '/surf/lh.white')); + [vertices.right, faces.right] = freesurfer_read_surf(strcat(PRD, '/surf/rh.white')); + surf_type = strcat(surf_type, '_white'); + end + + vertices.all = [vertices.left; vertices.right]; + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + [v.left, l.left, c.left] = read_annotation(strcat(FS, SUBJ_ID,'/label/lh.aparc.annot')); + [v.right, l.right, c.right] = read_annotation(strcat(FS, SUBJ_ID,'/label/rh.aparc.annot')); + lhreftable = load(strcat(SCRIPTS_DIR, 'util/lh_ref_table.txt', '-ascii')); + rhreftable = load(strcat(SCRIPTS_DIR, 'util/rh_ref_table.txt', '-ascii')); + + % convert region mapping from fs to scripts + wm_regmap_lh = l.left; + for i = 1:numel(wm_regmap_lh) + ref_idx = find(lhreftable(:,6) == wm_regmap_lh(i)); + if ~isempty(lhreftable(ref_idx,5)) + wm_regmap_lh(i) = lhreftable(ref_idx,5); + else + wm_regmap_lh(i) = 0; + end + end + wm_regmap_rh = l.right; + for i = 1:numel(wm_regmap_rh) + ref_idx = find(rhreftable(:,6) == wm_regmap_rh(i)); + if ~isempty(lhreftable(ref_idx,5)) + wm_regmap_rh(i) = rhreftable(ref_idx,5); + else + wm_regmap_rh(i) = 0; + end + end + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + + % Correct for temporal poles + temp = zeros(size(vertices.all,1),1); + unknown_idx = find(wm_regmap==0); + tpoles_idx = unknown_idx(vertices.all(unknown_idx,3)<-15); + wm_regmap(tpoles_idx) = 0; + lh_tpoles_idx = tpoles_idx(tpoles_idx <= size(vertices.left,1)); + rh_tpoles_idx = tpoles_idx(tpoles_idx > size(vertices.left,1)); + wm_regmap(lh_tpoles_idx) = 42; + wm_regmap(rh_tpoles_idx) = 85; + + +elseif indiv_surf + surf_type = SUBJ_ID; + if pial + vertices.left = dlmread(strcat(PRD, '/surface/lh_vertices_low.txt'), ' '); + vertices.right = dlmread(strcat(PRD, '/surface/rh_vertices_low.txt'), ' '); + vertices.all = [vertices.left; vertices.right]; + faces.left = load(strcat(PRD,'/surface/lh_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.right = load(strcat(PRD,'/surface/rh_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + else + vertices.left = dlmread(strcat(PRD, 
'/surface/lh_white_vertices_low.txt'), ' '); + vertices.right = dlmread(strcat(PRD, '/surface/rh_white_vertices_low.txt'), ' '); + vertices.all = [vertices.left; vertices.right]; + faces.left = load(strcat(PRD,'/surface/lh_white_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.right = load(strcat(PRD,'/surface/rh_white_triangles_low.txt'))+1; % +1 because matlab indexing starts at 1 + faces.all = [faces.left; faces.right + size(vertices.left,1)]; + + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_white_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_white_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + end +end + + +%%%%%%%%%%%%%%%%%%% +%% IMPORT TRACKS %% +%%%%%%%%%%%%%%%%%%% +if GibbsConnectome + connectome_type = 'Gibbs'; + % import tracks + if reload_tracks + load('/your/path/to/groupconnectome_169_horn2015.mat'); % to download, see HEADER + ntcks = numel(fibs); + end + +elseif indiv_tracks + connectome_type = SUBJ_ID; + + brain_info = niftiinfo(strcat(PRD, '/connectivity/brain.nii.gz')); + M_vox2mm = brain_info.Transform.T(1:3, 1:3); + A_vox2mm = brain_info.Transform.T(end, 1:3); + + M_RAS2LAS = [[-1,0,0]; [0,1,0]; [0,0,1]]; + A_RAS2LAS = [brain_info.ImageSize(1), 0, 0]; + + system(strcat("mkdir -p ", PRD, '/connectivity/tmp_ascii_tck')); + + if (reload_tracks || (compute_connectome && ~exist('fibs', 'var'))) + files = dir(strcat(PRD, '/connectivity/tmp_ascii_tck/*.txt')); + ntcks = numel(files); + tck_idx = 1:ntcks; + tck_idx = tck_idx(randperm(numel(tck_idx))); + fibs = {}; + if ~nsamples + nsamples = ntcks; + else + ntcks = nsamples; + end + tckidx = 1; + for i = 1:nsamples + tck_i = load(strcat(PRD, '/connectivity/tmp_ascii_tck/',files(i).name)); + if size(tck_i, 1) >= minTckLength + fibs{tckidx} = (((tck_i * M_RAS2LAS) + A_RAS2LAS) * M_vox2mm) + A_vox2mm; + tckidx = tckidx + 1; + end + if ~mod(i,10000) + fprintf('%i percent of tracks imported \n\r', round(i*100/nsamples)); + end + end + ntcks = tckidx-1; + if nsamples > ntcks + nsamples = ntcks; + end + else + ntcks = nsamples + end + +end + + +% update suffix for saving files +save_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_minTckLen', num2str(minTckLength)); + +%%%%%%%%%%%%%%%%%%%%%%%% +%% Compute connectome %% +%%%%%%%%%%%%%%%%%%%%%%%% +if compute_connectome + + % tracks stats + tck_idx = 1:ntcks; + tck_idx = tck_idx(randperm(numel(tck_idx))); + + % tracks to parse + if nsamples + tck_idx = tck_idx(1:nsamples); + end + + % setting up parallel pools + nTcksPerWorker = floor(nsamples/nWorkers); + workersIdx = 0:nTcksPerWorker:nsamples; + parpool(nWorkers); + dfibs = distributed(fibs(tck_idx)); + spmd + % allocate memory of individual workers + localConnectome = sparse([], [], [], size(vertices.all, 1), size(vertices.all, 1), nz); + local_tckLengths_nb = sparse([], [], [], size(vertices.all, 1), size(vertices.all, 1), nz); + + % tracks stats + if compute_tck_stats + n_isects_w = []; % number of intersections with cortical surface mesh + stepSz_w = []; % distance between 2 consecutive pts + tckLengths_mm_w = []; + end + + own_fibs = getLocalPart(dfibs); + for i = 1:numel(own_fibs) + tck = own_fibs{i}; + + % display number of tracks processed + if ~mod(i,1000) + fprintf('%i tcks done out of %i for worker %i \n', i, nTcksPerWorker, labindex); + end + + % if track long enough for 
processing, proceed + if ( (size(tck,1) > sz_bound*2) && (size(tck,1) > minTckLength) ) + % first bound + bound1 = tck(1:sz_bound,:); + b1vec = ext_scaling * (bound1(end,:) - bound1(1,:)); + [isect1, ~, ~, ~, xcoords1] = TriangleRayIntersection(bound1(end,:), bound1(end,:) + b1vec, vertices.all(faces.all(:,1),:), vertices.all(faces.all(:,2),:), vertices.all(faces.all(:,3),:), 'lineType', 'segment'); + + if compute_tck_stats + n_isects_w = [sum(isect1), n_isects_w]; + end + + % check second bound only if first bound is valid + if sum(isect1) > 0 + % if several crossings, take the one closest to the beginning of the bound segment + if sum(isect1) > 1 + dists1 = pdist2(xcoords1, bound1(end,:)); + [minVal minIdx] = min(dists1); + xcoord1 = xcoords1(minIdx,:); + isect1_face_idx = minIdx; + else + isect1_face_idx = find(isect1); + xcoord1 = xcoords1(isect1_face_idx, :); + end + + % second bound + bound2 = tck(end-sz_bound:end,:); + b2vec = ext_scaling * (bound2(1,:) - bound2(end,:)); + [isect2, ~, ~, ~, xcoords2] = TriangleRayIntersection(bound2(1,:), bound2(1,:) + b2vec, vertices.all(faces.all(:,1),:), vertices.all(faces.all(:,2),:), vertices.all(faces.all(:,3),:), 'lineType', 'segment'); + + if compute_tck_stats + n_isects_w = [sum(isect2), n_isects_w]; + end + + if sum(isect2) > 0 + % if several crossings, take the one closest to the beginning of the bound segment + if sum(isect2) > 1 + dists2 = pdist2(xcoords2, bound2(1,:)); + [minVal minIdx] = min(dists2); + xcoord2 = xcoords2(minIdx,:); + isect2_face_idx = minIdx; + else + isect2_face_idx = find(isect2); + xcoord2 = xcoords2(isect2_face_idx, :); + end + + % get for each xcoords the corresponding closest vertex from the intersecting faces + dists1 = pdist2(xcoord1, vertices.all(faces.all(isect1_face_idx,:),:)); + [minDist1 minIdx1] = min(dists1); + dists2 = pdist2(xcoord2, vertices.all(faces.all(isect2_face_idx,:),:)); + [minDist2 minIdx2] = min(dists2); + + A = faces.all(isect1_face_idx,minIdx1); + B = faces.all(isect2_face_idx,minIdx2); + + % add connection to connectome + localConnectome(A,B) = localConnectome(A, B) + 1; + + % add nbr of step to tckLengths.. 
+ local_tckLengths_nb(A,B) = local_tckLengths_nb(A,B) + size(tck,1); + + % add step size and track length to stats + if compute_tck_stats + d = diff(tck); + steps = sqrt(sum(d.*d,2)); + stepSz_w = [stepSz_w, steps']; + tckLengths_mm_w = [sum(steps), tckLengths_mm_w]; + end + end + end + end + end + end + + % global memory allocation + connectome = sparse(size(vertices.all, 1), size(vertices.all, 1)); + tckLengths_nb = sparse(size(vertices.all, 1), size(vertices.all, 1)); + n_isects = []; % number of intersections with cortical surface mesh + stepSz = []; % distance between 2 consecutive pts + tckLengths_mm = []; + + for ii = 1:nWorkers + connectome = connectome + localConnectome{ii}; + tckLengths_nb = tckLengths_nb + local_tckLengths_nb{ii}; + if compute_tck_stats + n_isects = [n_isects, n_isects_w{ii}]; + stepSz = [stepSz, stepSz_w{ii}]; + tckLengths_mm = [tckLengths_mm, tckLengths_mm_w{ii}]; + end + end + delete(gcp); + + tckLengths_nb = (tckLengths_nb + tckLengths_nb') ./ (connectome + connectome'); + tckLengths_nb(isnan(tckLengths_nb)) = 0; + + if save_connectome + save(strcat(PRD, '/connectivity/highRes_connectome', save_suffix, '.mat'), 'connectome'); + save(strcat(PRD, '/connectivity/highRes_tckSize', save_suffix, '.mat'), 'tckLengths_nb'); + end + + if plot_connectome + connectome_fig = figure('Position', [600 600 1200 600], 'Visible', vis); + subplot(1,2,1); + spy(connectome + connectome'); + title('Connectome'); + + subplot(1,2,2); + imagesc(tckLengths_nb); + title('Track lengths (nbr of segments)'); + + if save_figs + fname = strcat('highRes_connectivityMatrix_', connectome_type, '_smoothIt', num2str(smoothIter), '_nsamples', num2str(nsamples), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling)); + pause(2) + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-eps'); + pause(2) + display(strcat(fname, ' figure exported')); + if close_figs + close(connectome_fig); + end + end + end + + if plot_tckLengths + CC_mask = ones(size(vertices.all,1), size(vertices.all,1)); + CC_mask(1:size(vertices.left,1), 1:size(vertices.left,1)) = zeros(size(vertices.left,1), size(vertices.left,1)); + CC_mask(size(vertices.left,1)+1:end, size(vertices.left,1)+1:end) = zeros(size(vertices.right,1), size(vertices.right,1)); + CC_tckLengths_nb = (tckLengths_nb .* CC_mask); + + tckLengths_fig = figure('Position', [600 600 1200 600], 'Visible', vis); + subplot(1,2,1); + histogram(tckLengths_nb(tckLengths_nb~=0)); + xlabel('track length (nbr of segments)'); ylabel('counts'); + title('Whole brain'); + + subplot(1,2,2); + histogram(CC_tckLengths_nb(CC_tckLengths_nb~=0)); + xlabel('track length (nbr of segments)'); ylabel('counts'); + title('Inter-hemispheric'); + + if save_figs + fname = strcat('tckLengths', save_suffix); + pause(2) + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-eps'); + pause(2) + display(strcat(fname, ' figures exported')); + if close_figs + close(tckLengths_fig); + end + end + end + + if plot_tck_stats + tckStats_fig = figure('Position', [600 1200 1200 400]); + + subplot(1,3,1); + histogram(n_isects, -0.5:1:9.5, 'Normalization', 'Probability'); + xlabel('nbr of intersection per bound'); ylabel('Probability'); + + subplot(1,3,2); + histogram(stepSz, 0:0.1:5, 'Normalization', 'Probability'); + xlabel('Step size (mm)'); 
ylabel('Probability'); + + subplot(1,3,3); + histogram(tckLengths_mm, 50, 'Normalization', 'Probability'); + xlabel('Track length (mm)'); ylabel('Probability'); + + if save_figs + fname = strcat('trackStats_smoothIt', num2str(smoothIter), '_nsamples', num2str(nsamples), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling)); + pause(2) + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + saveas(tckStats_fig, strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), 'svg'); pause(2); + display(strcat(fname, ' figure exported')); + if close_figs + close(tckStats_fig); + end + end + end +end + +% update suffix for saving files +load_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_minTckLen', num2str(minTckLength)); +save_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_minTckLen', num2str(minTckLength),'_10taZsc', num2str(threshold_adjacency*10), trimAdj_all, '_localSpread', num2str(local_spread), '_anisotropy', distAni, num2str(anisotropy*100), '_callosectomy', num2str(callosectomy*100), distCallo, callo_suffix, rand_type, rnd_proba, sep_hemi); %, '_sigmaEigs', num2str(sigma_eigs), '_tolEigs', num2str(tol_eigs)); + + +%%%%%%%%%%%%%%%%%%%%%%% +%% Compute harmonics %% +%%%%%%%%%%%%%%%%%%%%%%% +if compute_harmonics + if ~exist('connectome', 'var') || reload_connectome + load(strcat(PRD, '/connectivity/highRes_connectome', load_suffix)); + end + + if ~exist('tckLengths_nb', 'var') || reload_connectome + load(strcat(PRD, '/connectivity/highRes_tckSize', load_suffix)); + if ~exist('tckLengths_nb', 'var') + tckLengths_nb = tckLengths_nbr; + clear tckLengths_nbr; + end + tckLengths_nb(isnan(tckLengths_nb)) = 0; + if ~issymmetric(tckLengths_nb) + tckLengths_nb = (tckLengths_nb + tckLengths_nb')./2; + end + end + + if ~exist('wm_regmap', 'var') + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_white_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_white_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + end + + + connectome(find(eye(size(connectome)))) = 0; % remove self connections + c_idx = find(connectome); % indices of connections + sigma_c = std(connectome(c_idx)); % std dev of connections weights + mu_c = mean(mean(connectome(c_idx))); % mean of connections weights + + % Adjacency matrix of the connectome + disp('Trimming connectome...'); + % weighted trimming (z-score based on weights) + A_c = zeros(size(connectome)); + if threshold_adjacency + zsc = (connectome(c_idx) - mu_c) / sigma_c; + A_c(c_idx) = zsc > threshold_adjacency; + % semi-weighted trimming (percentage of nbr of connections (not weight), + % starting with smallest weights). + elseif trim_adjacency>0 + A_c = connectome > 0; + ntrim = round(trim_adjacency * numel(c_idx)); + [vals, vals_idx] = sort(connectome(c_idx), rand_trim_type); + connectome(c_idx(vals_idx(1:ntrim))) = 0; + % track lengths based trimming (percentage of nbr of connections (not weights), + % starting with longest fibers). 
+ elseif tckLen_trim_adjacency>0 + ntrim = round(tckLen_trim_adjacency * numel(c_idx)); + [vals, vals_idx] = sort(tckLengths_nb(c_idx), tckLen_trim_type); + connectome(c_idx(vals_idx(1:ntrim))) = 0; + A_c = connectome > 0; + else + A_c = connectome ~= 0; + end + + if ~issymmetric(A_c) + A_c = (A_c + A_c') > 0; + end + + % compute local mesh adjacency matrix + A_local = sparse(size(vertices.all,1),size(vertices.all,1)); + + % local connectivity : immediate neighbours i.e. local spread = 1 + if local_spread > 0 + A_local_lh = triangulation2adjacency(faces.left); + A_local_rh = triangulation2adjacency(faces.right); + A_local(1:size(vertices.left,1), 1:size(vertices.left,1)) = A_local_lh; + A_local(size(vertices.left,1)+1:end, size(vertices.left,1)+1:end) = A_local_rh; + + % neighbours of neighbours i.e. local spread = 2 + if local_spread > 1 + A_local_2 = sparse(size(vertices.all,1),size(vertices.all,1)); + for i = 1:size(A_local,1) + ci_idx = find(A_local(i,:)); + for j = ci_idx + cj_idx = find(A_local(j,:)); + A_local_2(i,cj_idx) = 1; + end + if ~mod(i, 1000) + fprintf('%i%% local spread done. \n\r', round(i*100/size(A_local,1))); + end + end + A_local = (A_local + A_local_2) > 0; + A_local(find(eye(size(A_local)))) = 0; + end + end + + % random trimming of local connectivity + if anisotropy + if dist_based_local_trim + if local_spread==1 + load(fullfile(PRD, 'connectivity', 'R_local_1')); + R_local = R_local_1; + elseif local_spread==2 + load(fullfile(PRD, 'connectivity', 'R_local_2')); + R_local = R_local_2; + else + error('choose local_spread to be 1 or 2.') + end + c_idx_local = find(A_local>0); + [rows, cols] = ind2sub(size(A_local), c_idx_local); + n_lc = numel(c_idx_local); + n_trim = round(n_lc * anisotropy); % total number of connections to trim + local_lengths = R_local(c_idx_local); + [sorted_local_lengths, local_c_idx] = sort(local_lengths, AniSorting); + idx_trimmed = local_c_idx(1:n_trim); + A_local(rows(idx_trimmed), cols(idx_trimmed)) = 0; % remove connections at trimmed indices + A_local(cols(idx_trimmed), rows(idx_trimmed)) = 0; % remove connections at trimmed indices + else + c_idx_local = find(A_local>0); + [rows, cols] = ind2sub(size(A_local), c_idx_local); + n_lc = numel(c_idx_local); % total number of local connections + n_trim = round(n_lc * anisotropy); % total number of connections to trim + idx_trimmed = randperm(n_lc, n_trim); % select randomly indices to be trimmed + + A_local(rows(idx_trimmed), cols(idx_trimmed)) = 0; % remove connections at trimmed indices + A_local(cols(idx_trimmed), rows(idx_trimmed)) = 0; % remove connections at trimmed indices + end + end + + + % create corpus callosum mask to select inter-henispheric connections + CC_mask = ones(size(vertices.all,1), size(vertices.all,1)); + CC_mask(1:size(vertices.left,1), 1:size(vertices.left,1)) = zeros(size(vertices.left,1), size(vertices.left,1)); + CC_mask(size(vertices.left,1)+1:end, size(vertices.left,1)+1:end) = zeros(size(vertices.right,1), size(vertices.right,1)); + + CC_tckLengths_nb = (tckLengths_nb .* A_c .* CC_mask); + + if plot_CC_stats + CC_stats_fig = figure('Position', [800 800 1200 400], 'visible', vis); + subplot(1,3,1); + histogram(CC_tckLengths_nb(CC_tckLengths_nb~=0)); + xlabel('CC tck lengths (nbr of points)'); ylabel('counts'); + title(sprintf('Before callosectomy n = %i', sum(sum(full(CC_tckLengths_nb~=0))))); + end + + % trim interhemispheric connections by descending length + if callosectomy + CC_idx = find(CC_tckLengths_nb>0); + CC_pos = CC_tckLengths_nb(CC_idx); + if 
dist_based_callo_trim + [sorted_CC_tckLen, sorted_CC_idx] = sort(CC_pos, CalloSorting); + else + sorted_CC_idx = randperm(numel(CC_pos)); % actually not sorted but kept name for compatibility + sorted_CC_tckLen = CC_pos(sorted_CC_idx); + end + + trim_sorted_idx = 1:ceil(callosectomy * numel(sorted_CC_tckLen)); + [CC_i, CC_j] = ind2sub(size(A_c), CC_idx(sorted_CC_idx(trim_sorted_idx))); + A_c(CC_i, CC_j) = 0; + A_c(CC_j, CC_i) = 0; + end + + % Randomizations + if randomize_graph_global + if rand_proba==1 + A_c = randmio_und(A_c, iter_rand); + else + n_rows = size(A_c,1); + n_rand = floor(rand_proba*n_rows); + rand_idx = randperm(n_rows, n_rand); + A_c(rand_idx,rand_idx) = randmio_und(A_c(rand_idx,rand_idx), iter_rand); + end + end + + if randomize_graph_CC_only + A_CC_c = A_c(size(vertices.left)+1:end, 1:size(vertices.left,1)); + A_CC_rand = randmio_dir(A_CC_c, iter_rand); + A_c(size(vertices.left)+1:end, 1:size(vertices.left,1)) = A_CC_rand; + A_c(1:size(vertices.left,1), size(vertices.left)+1:end) = A_CC_rand'; + end + + if randomize_graph_intraHemi_only + A_left = A_c(1:size(vertices.left), 1:size(vertices.left,1)); + A_left_rand = randmio_und(A_left,iter_rand); + A_c(1:size(vertices.left), 1:size(vertices.left,1)) = A_left_rand; + + A_right = A_c(size(vertices.left)+1:end, size(vertices.left,1)+1:end); + A_right_rand = randmio_und(A_right,iter_rand); + A_c(size(vertices.left)+1:end, size(vertices.left,1)+1:end) = A_right_rand; + end + + A = (A_local + A_c) > 0; % combine local and global adjacency matrices + + if plot_CC_stats + CC_tckLengths_nb = (tckLengths_nb .* A_c .* CC_mask); % update with new A_c (callosal trimmed) + subplot(1,3,2); + histogram(CC_tckLengths_nb(CC_tckLengths_nb~=0)); + xlabel('CC tck lengths (nbr of points)'); ylabel('counts'); + title(sprintf('After callosectomy n = %i', sum(sum(full(CC_tckLengths_nb~=0))))); + end + + % degree matrix of adjacency matrix + D_A = degree(graph(A)); + + % remove isolated vertices (D_A==0) and subcortical areas ([...] 
is the array containing the indices of subcortical areas in the region mapping) - /!\ Only Desikan-Killiany atlas supported + subCtx_arr = ismember(wm_regmap, [0 1 2 3 4 5 6 7 8 9 10 45 46 47 48 49 50 51 52 53]); + ctx_idx = find(~subCtx_arr .* ~(D_A==0)); + A_ctx = zeros(size(A)); + A_ctx(ctx_idx, ctx_idx) = A(ctx_idx, ctx_idx); + + if plot_CC_stats + CC_tckLengths_nb = (tckLengths_nb .* A_ctx .* CC_mask); % update with new A_c (callosal trimmed) + subplot(1,3,3); + histogram(CC_tckLengths_nb(CC_tckLengths_nb~=0)); + xlabel('CC tck lengths (nbr of points)'); ylabel('counts'); + title(sprintf('After callosectomy \n\r and removing subcortical areas, n = %i', sum(sum(full(CC_tckLengths_nb~=0))))); + + % separate figure inter-hemispheric degree projection on cortical surface + CC_deg_fig = plotNFsurf('CC degree', vertices, faces, degree(graph(CC_tckLengths_nb)), subj_type, vis); + + if save_figs + % save degree + fname = strcat('CC_tckLengths_nb_degree', save_suffix); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + % save distribution of CC tck lengths + fname = strcat(PRD, '/connectivity/img_',SUBJ_ID,'/CC_tckLengths_nb_histo', save_suffix); + pause(1); + saveas(CC_stats_fig, fname, 'png'); + pause(1); + saveas(CC_stats_fig, fname, 'svg'); + pause(2); + end + if close_figs + close(CC_stats_fig); + close(CC_deg_fig); + end + end + + % discard isolated clusters disconnected from main graph connectome + G = graph(A_ctx); + G_bins = conncomp(G); + G_idx = find(G_bins==1); % get node indices of main cluster + + if n_graph_chunks == 2 + % find index of second cluster + ubin = unique(G_bins); + nbin = zeros(numel(ubin),1); + for b = 1:numel(ubin) + nbin(b) = numel(find(G_bins==ubin(b))); + end + [nbins, bin_idx] = sort(nbin, 'descend'); + ubin(bin_idx); + val = ubin(bin_idx(2)); + G_idx2 = find(G_bins==val); + G_idx = cat(2, G_idx, G_idx2); + end + + if plot_graph + g_fig = figure('Position', [600 600 800 800], 'Visible', vis); + plot(graph(A_ctx(G_idx, G_idx))); + if save_figs + fname = strcat('Graph', save_suffix); + pause(2); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + g_fig.Renderer = 'Painters'; + saveas(g_fig, strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), 'svg'); + close(g_fig); + end + end + + if plot_connectomeStats + plotConnectomeStats(vertices, faces, connectome, A_c, save_figs, threshold_adjacency, PRD, SUBJ_ID, subj_type, save_suffix, vis); + end + + % extract final degree matrix + D = diag(degree(graph(A_ctx))); + + % compute laplacian + L = 0.5 * ( (D - A_ctx)' + (D - A_ctx) ); + + % perform eigen decomposition + opts.tol = tol_eigs; + opts.issym = 1; + opts.disp = 1; + [evecs, evals] = eigs(L(G_idx, G_idx), nHarmonics, sigma_eigs, opts); + + % reconstruct harmonics in entire surface mesh + H = zeros(size(connectome,1), nHarmonics); + H(G_idx, :) = evecs; + + % case of harmonics computed for each hemisphere separately + if separate_hemispheres + % find index of second cluster + ubin = unique(G_bins); + nbin = zeros(numel(ubin),1); + for b = 1:numel(ubin) + nbin(b) = numel(find(G_bins==ubin(b))); + end + [nbins, bin_idx] = sort(nbin, 'descend'); + ubin(bin_idx); + val = ubin(bin_idx(2)); + + % compute eigenvector-eigenvalue pair of second cluster (i.e. 
2nd hemisphere) + G_idx2 = find(G_bins==val); + [evecs2, evals2] = eigs(L(G_idx2, G_idx2), nHarmonics, sigma_eigs, opts); + + % create harmonics by assembling pairs or eigenvectors using same and different polarity + H = zeros(size(connectome,1), nHarmonics); + H(G_idx, 1:2:nHarmonics) = evecs(:,1:(nHarmonics/2)); + H(G_idx, 2:2:nHarmonics) = -evecs(:,1:(nHarmonics/2)); + H(G_idx2,1:2:nHarmonics) = evecs2(:,1:(nHarmonics/2)); + H(G_idx2,2:2:nHarmonics) = evecs2(:,1:(nHarmonics/2)); + end + + % save everything needed for plotting and re-use + combined = struct('vertices', vertices, 'faces', faces, 'L', sparse(L), 'D_A', D_A, 'eigvals', evals, 'H', H, ... + 'local_vs_global_ratio', sum(sum(A_local(ctx_idx,ctx_idx))) / sum(sum(A_ctx)), 'A_ctx', sparse(A_ctx), 'A_local', sparse(A_local), 'D', sparse(D), 'ta_zsc', threshold_adjacency, ... + 'mu_cc', mu_c, 'sigma_cc', sigma_c, 'ta_w', threshold_adjacency*sigma_c+mu_c); + if save_combined + save(strcat(PRD, '/connectivity/combined_harmonics', save_suffix, '.mat'), '-struct', 'combined'); + end + + % plot eigenvalues + fig = figure('Visible', 'off'); bar(diag(evals)); title('Eigenvalues'); + if save_figs + saveas(fig, strcat(PRD, '/connectivity/img_',SUBJ_ID,'/eigenvalues', save_suffix), 'svg'); + end + close(fig); + + % plot harmonics + if plot_harmonics + for h=1:nPlottedHarmonics + fname = strcat('H', num2str(h, '%02i'), save_suffix); + hrmnc = plotNFsurf(fname, combined.vertices, combined.faces, combined.H(:,h), subj_type, vis); + if save_figs + pause(2); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + close(hrmnc); + display(strcat(fname, ' image exported')); + end + end + end + +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Convert High Resolution Harmonics into atlas space %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +if convert_into_DSK_atlas + fprintf('Converting Harmonics from mesh resolution to DSK atlas... 
\n\r'); + load_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_10taZsc', num2str(threshold_adjacency*10), trimAdj_all, '_localSpread', num2str(local_spread), '_anisotropy', distAni, num2str(anisotropy*100), '_callosectomy', num2str(callosectomy*100), distCallo, callo_suffix, rand_type, rnd_proba, sep_hemi); %, '_sigmaEigs', num2str(sigma_eigs), '_tolEigs', num2str(tol_eigs)); + save_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_10taZsc', num2str(threshold_adjacency*10), trimAdj_all, '_localSpread', num2str(local_spread), '_anisotropy', distAni, num2str(anisotropy*100), '_callosectomy', num2str(callosectomy*100), distCallo, callo_suffix, rand_type, rnd_proba, sep_hemi); %, '_10thZscMI', num2str(thZscMI*10)); %, '_sigmaEigs', num2str(sigma_eigs), '_tolEigs', num2str(tol_eigs)); + if (~exist('combined', 'var') || reload_harmonics) + combined = load(strcat(PRD, '/connectivity/combined_harmonics', load_suffix, '.mat')); + end + + if ~exist('wm_regmap', 'var') + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_white_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_white_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + end + + ROIs = unique(wm_regmap); + nROI = numel(ROIs); + H_DSK = zeros(nHarmonics,nROI); + for h = 1:nHarmonics + for i = 1:nROI + H_DSK(h,i) = mean(combined.H(find(wm_regmap==ROIs(i)), h)); + end + end + + if plot_converted_harmonics + for h=1:nPlottedHarmonics + tmpH = zeros(1,size(combined.H,1)); + for i = 1:nROI + tmpH(find(wm_regmap==ROIs(i))) = H_DSK(h,i); + end + fname = strcat('H_DSK', num2str(h, '%02i'), save_suffix); + hrmnc = plotNFsurf(fname, combined.vertices, combined.faces, tmpH', subj_type, vis); + if save_figs + pause(2); + export_fig(strcat(PRD, '/connectivity/img_',SUBJ_ID,'/', fname), '-png', '-transparent'); + pause(2); + hrmnc.Renderer = 'Painters'; + saveas(hrmnc, strcat(PRD, '/connectivity/img_', SUBJ_ID,'/', fname), 'svg'); pause(1) + close(hrmnc); + display(strcat(fname, ' image exported')); + end + end + end + + if re_save_combined + combined.H_DSK = sparse(H_DSK); + if isfield(combined, 'A_ctx') + if ~issparse(combined.A_ctx) + combined.A_ctx = sparse(combined.A_ctx); + end + end + if isfield(combined, 'A') + if ~issparse(combined.A) + combined.A = sparse(combined.A); + end + end + if isfield(combined,'D') + if ~issparse(combined.D) + combined.D = sparse(combined.D); + end + end + if isfield(combined,'L') + if ~issparse(combined.L) + combined.L = sparse(combined.L); + end + end + save(strcat(PRD, '/connectivity/combined_harmonics', save_suffix, '.mat'), '-struct', 'combined'); + end +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Mutual information with RSNs based on region mapping %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +if MI_RSN + fprintf('Computing Mutual Information... 
\n\r'); + load_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_minTckLen', num2str(minTckLength), '_10taZsc', num2str(threshold_adjacency*10), trimAdj_all, '_localSpread', num2str(local_spread), '_anisotropy', distAni, num2str(anisotropy*100), '_callosectomy', num2str(callosectomy*100), distCallo, callo_suffix, rand_type, rnd_proba, sep_hemi); %, '_sigmaEigs', num2str(sigma_eigs), '_tolEigs', num2str(tol_eigs)); + save_suffix = strcat('_', surf_type, '_', connectome_type, '_nsamples', num2str(nsamples), '_smoothIter', num2str(smoothIter), '_szBnd', num2str(sz_bound), '_extScaling', num2str(ext_scaling), '_minTckLen', num2str(minTckLength), '_10taZsc', num2str(threshold_adjacency*10), trimAdj_all, '_localSpread', num2str(local_spread), '_anisotropy', distAni, num2str(anisotropy*100), '_callosectomy', num2str(callosectomy*100), distCallo, callo_suffix, rand_type, rnd_proba, sep_hemi, '_10thZscMI', num2str(thZscMI*10)); %, '_sigmaEigs', num2str(sigma_eigs), '_tolEigs', num2str(tol_eigs)); + if (~exist('combined', 'var') || reload_harmonics) + combined = load(strcat(PRD, '/connectivity/combined_harmonics', load_suffix, '.mat')); + end + + if ~exist('wm_regmap', 'var') + % read white matter region mapping + wm_regmap_lh = load(strcat(PRD, '/surface/lh_white_region_mapping_low.txt')); + wm_regmap_rh = load(strcat(PRD, '/surface/rh_white_region_mapping_low.txt')); + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + end + + % select DMN areas ([...] is the array containing the indices of DMN areas in the region mapping: isthmus cing, mPFC, middle temp, PCC, angular gyrus) + RSN_version = '2020_'; %'2018_' <-- older versions of the code used less regions in DMN + + % region wise + if strcmp(RSN, 'DMN') + if strcmp(RSN_version, '2020_') + rsn_lh = [13 17 19 24 32 34 35 37 39]; + rsn_rh = [60 62 67 75 77 78 80 82]; + else + rsn_lh = [19 23 24 32 40]; + rsn_rh = [62 66 67 75 83]; + end + elseif strcmp(RSN, 'SM') + rsn_lh = [31 33 26]; + rsn_rh = [74 76 69]; + elseif strcmp(RSN, 'FP') + rsn_lh = [12 16 18 36]; + rsn_rh = [55 59 61 79]; + elseif strcmp(RSN, 'Vis') + rsn_lh = [20 30 14 22]; + rsn_rh = [63 73 57 65]; + elseif strcmp(RSN, 'L') + rsn_lh = [15 21 23 41 42]; + rsn_rh = [58 64 66 84 85]; + elseif strcmp(RSN, 'VA') + rsn_lh = [27 44]; + rsn_rh = [70 87]; + elseif strcmp(RSN, 'DA') + rsn_lh = [38 13]; + rsn_rh = [81 56]; + end + + % based on Yeo et al. 
2011 + if RSN_Yeo2011 + if ~contains(SUBJ_ID, 'fsaverage') + error("Yeo2011 parcellaltion only available for fsaverage surfaces"); + end + [v.left, l.left, c.left] = read_annotation(strcat(PRD,'/label/lh.Yeo2011_7Networks_N1000.annot')); + [v.right, l.right, c.right] = read_annotation(strcat(PRD,'/label/rh.Yeo2011_7Networks_N1000.annot')); + wm_regmap_lh = l.left; + wm_regmap_rh = l.right; + wm_regmap = [wm_regmap_lh; wm_regmap_rh]; + if strcmp(RSN, 'DA') + rsn_lh = [947712]; + rsn_rh = [941712]; + elseif strcmp(RSN, 'FP') + rsn_lh = [2266342]; + rsn_rh = [2266342]; + elseif strcmp(RSN, 'DMN') + rsn_lh = [5127885]; + rsn_rh = [5127885]; + elseif strcmp(RSN, 'Vis') + rsn_lh = [8786552]; + rsn_rh = [8786552]; + elseif strcmp(RSN, 'L') + rsn_lh = [10811612]; + rsn_rh = [10811612]; + elseif strcmp(RSN, 'SM') + rsn_lh = [11829830]; + rsn_rh = [11829830]; + elseif strcmp(RSN, 'VA') + rsn_lh = [16399044]; + rsn_rh = [16399044]; + end + RSN_version = '_Yeo2011'; + end + + rsn_lh_arr = ismember(wm_regmap_lh, rsn_lh); + rsn_rh_arr = ismember(wm_regmap_rh, rsn_rh); + rsn_lh_idx = find(rsn_lh_arr); + rsn_rh_idx = find(rsn_rh_arr); + + % whole brain + wm_rsn_arr = ismember(wm_regmap, cat(2, rsn_lh, rsn_rh)); + wm_rsn_idx = find(wm_rsn_arr); + + + rsn_arr = [rsn_lh_arr; rsn_rh_arr; wm_rsn_arr]; + lh_idxs = 1:numel(wm_regmap_lh); + rh_idxs = numel(wm_regmap_lh)+1:numel(wm_regmap); + all_idxs = 1:numel(wm_regmap); + idxs = {lh_idxs, rh_idxs, all_idxs}; + + if plot_RSN_mask + RSN_fig = plotNFsurf(RSN, combined.vertices, combined.faces, double(wm_rsn_arr), 'HCP', vis); + if save_figs + pause(1) + print(RSN_fig, strcat(PRD, '/connectivity/img_', SUBJ_ID,'/', rsn_type, save_suffix), '-dpng'); pause(1) + end + if strcmp(vis, 'off') + close(RSN_fig); + end + end + + + % Compute MI with DMN using different methods + for m = 1:numel(nBinsMI) + % transform harmonics into binary vectors (using z-score thresholding at value thZscMI) + if nBinsMI(m)==1 + for h=1:nHarmonics + for hemi = 1:3 % left, right, both + zharm = ( combined.H(:,h) - mean(combined.H(:,h)) ) / std(combined.H(:,h)); + zharm_bin = zharm > thZscMI; + + J = [ sum(~wm_rsn_arr(idxs{hemi}) & ~zharm_bin(idxs{hemi})), sum(~wm_rsn_arr(idxs{hemi}) & zharm_bin(idxs{hemi})); ... + sum( wm_rsn_arr(idxs{hemi}) & ~zharm_bin(idxs{hemi})), sum( wm_rsn_arr(idxs{hemi}) & zharm_bin(idxs{hemi})) ] / numel(zharm_bin(idxs{hemi})); + JJ = sum( sum( J .* log2(J./(sum(J,2)*sum(J,1))) ) ); + + % sign reversal + zharm_bin = -zharm > thZscMI; + K = [ sum(~wm_rsn_arr(idxs{hemi}) & ~zharm_bin(idxs{hemi})), sum(~wm_rsn_arr(idxs{hemi}) & zharm_bin(idxs{hemi})); ... + sum( wm_rsn_arr(idxs{hemi}) & ~zharm_bin(idxs{hemi})), sum( wm_rsn_arr(idxs{hemi}) & zharm_bin(idxs{hemi})) ] / numel(zharm_bin(idxs{hemi})); + KK = sum( sum( K .* log2(K./(sum(K,2)*sum(K,1))) ) ); + + MI(m,hemi,h) = max(JJ,KK); + + CC(h) = corr(double(wm_rsn_arr), double(zharm_bin)); + end + end + MI_suffix = 'v3'; + else + % discretize harmonics into n bins + for h = 1:nHarmonics + for hemi = 1:3 % left, right, both + [nh,eh] = histcounts(combined.H(:,h), nBinsMI(m), 'Normalization', 'probability'); + [nd,ed] = histcounts(wm_rsn_arr, 2, 'Normalization', 'probability'); + for i=1:nBinsMI(m) % binarized (i.e. 
more "continuous") data (Harmonics)
+                    for j=1:2 % binary data (DMN)
+                        jdp(i,j) = sum((combined.H(idxs{hemi},h)>eh(i)) .* (combined.H(idxs{hemi},h)<=eh(i+1)) .* (wm_rsn_arr(idxs{hemi})>ed(j)) .* (wm_rsn_arr(idxs{hemi})<=ed(j+1)))/numel(wm_rsn_arr(idxs{hemi}));
+                        idp(i,j) = nh(i)*nd(j);
+                    end
+                end
+                idx = jdp~=0;
+                MI(m,hemi,h) = sum( sum( jdp(idx) .* log2(jdp(idx) ./ idp(idx)) ) );
+            end
+        end
+        MI_suffix = strcat('nBins', num2str(nBinsMI(m)));
+    end
+    MI_m = permute(MI(m,:,:), [3 2 1]);
+    MI_fig = figure('Position', [0 1000 3200 300], 'Visible', vis);
+    bar(1:nHarmonics, MI_m);
+    ylim(MI_ylim);
+    xlabel('Harmonics'); ylabel('Mutual Information');
+    legend({'left hemi', 'right hemi', 'both'})
+    if save_figs
+        pause(1)
+        fname = strcat(PRD, '/connectivity/img_', SUBJ_ID,'/MI_', rsn_type, MI_suffix, save_suffix);
+        print(MI_fig, fname, '-depsc');
+        print(MI_fig, fname, '-dpng');
+        print(MI_fig, fname, '-dsvg');
+
+    end
+    if save_MI
+        save(strcat(PRD, '/connectivity/MI_',rsn_type, RSN_version, MI_suffix, save_suffix), 'MI_m');
+    end
+    if strcmp(vis, 'off')
+        close(MI_fig);
+    end
+end
+end
+
diff --git a/README.md b/README.md
index 6071082..77529f1 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,23 @@
 # Surface and Connectivity Reconstruction: Imaging Pipeline for TVB Simulations
-## SCRIPTS 0.4.1
+## SCRIPTS 0.4-CH
 Please see the [wiki](https://github.com/ins-amu/scripts/wiki)
-#### New in 0.4.1:
-- Remove need for Matlab
-- new subparcellation scheme where arbitrary number of subregions can be chosen
+If using SCRIPTS, please cite this work:
+Proix T, Spiegler A, Schirner M, Rothmeier S, Ritter P, Jirsa, VK. How do parcellation size and short-range connectivity affect dynamics in large-scale brain network models? NeuroImage, 2016, 142:135-149 \[[link](https://www.sciencedirect.com/science/article/abs/pii/S1053811916302518)\]
-#### New in 0.4:
+If using the Connectome Harmonics add-on, please cite:
+Naze S., Proix T., Atasoy S., Kozloski J. R. (2020) Robustness of connectome harmonics to local gray matter and long-range white matter connectivity changes. NeuroImage \[[link](https://www.sciencedirect.com/science/article/pii/S1053811920308508)\]
+
+
+#### Connectome Harmonics release:
+- Compute a high-resolution connectome from the intersections of tracks with the cortical surface mesh (requires MATLAB Parallel Computing Toolbox).
+- Compute eigenvalue/eigenvector pairs of combined local and long-range connectivity matrices.
+- Compute the Mutual Information of the 100 lowest-eigenvalue harmonics with the Default Mode Network.
+- To run it: after `main_surface.sh` has completed, run `main_CH.sh`.
+- Template connectome harmonics for the *cvs\_avg35\_inMNI152*, *fsaverage4* and *fsaverage5* surfaces, computed from the [Gibbs tractography streamlines](https://www.nitrc.org/projects/gibbsconnectome), are now [available on zenodo](https://zenodo.org/record/4027989)
+
+#### NEW in 0.4:
 - Dockerfile for easy installation with Docker.
 - Tests.
 - Reorganization and simplification of the code => compatibility with tvb-make.
@@ -15,7 +25,7 @@ Please see the [wiki](https://github.com/ins-amu/scripts/wiki)
 #### Features
 - prepare data for TVB: surface reconstruction, region mapping, connectome.
-- mrtrix 3.0 RC3: SIFT/SIFT2, ACT, 3 types of registration structural/diffusion, denoising, topup/eddy corrections, bias field corrections, mask upsampling and dilatation, multi-shell multi-tissue, dhollander algo, fsl subcortical structure parcellation, tractogram and tdi generation, multi-threaded.
+- mrtrix 3.0 RC2: SIFT/SIFT2, ACT, 3 types of registration structural/diffusion, denoising, topup/eddy corrections, bias field corrections, mask upsampling and dilatation, multi-shell multi-tissue, dhollander algo, fsl subcortical structure parcellation, tractogram and tdi generation, multi-threaded.
 - automatic config checks.
 - convenience script for HCP datasets.
 - handle automatically reverse phase-encoding DWI in most cases.
@@ -27,10 +37,6 @@ Please see the [wiki](https://github.com/ins-amu/scripts/wiki)
 This poject use the MIT License. The full license is in LICENSE.txt in the SCRIPTS distribution.
-Copyright (c) [2014] [The SCRIPTS Developers]
-
-#### How to cite
+Copyright (c) [2014-2020] [The SCRIPTS Developers]
-When citing SCRIPTS, please cite this work:
-Proix T, Spiegler A, Schirner M, Rothmeier S, Ritter P, Jirsa, VK. How do parcellation size and short-range connectivity affect dynamics in large-scale brain network models? NeuroImage, 2016, 142:135-149
diff --git a/config_CH.sh b/config_CH.sh
new file mode 100755
index 0000000..a9fb5f2
--- /dev/null
+++ b/config_CH.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# This is the separate config file for computing connectome harmonics
+# It assumes that the main SCRIPTS config file has been set up and that main_surface.sh ran without errors.
+
+# By default, the routine computes subject-specific harmonics based on all tracks produced by the tractography
+
+
+#######################
+## Setup environment ##
+#######################
+
+# Path to SCRIPTS root directory
+export SCRIPTS_DIR=`pwd`
+
+# Path to Matlab Utils (for dependencies)
+export PMU=$SCRIPTS_DIR/matlab_utils/
+
+# number of parallel cores used to compute the high-resolution connectome (kept separate from NTHREADS because this step is more memory-intensive)
+export NWORKERS=4
+
+
diff --git a/example_config.sh b/example_config.sh
index 089504a..8145c97 100644
--- a/example_config.sh
+++ b/example_config.sh
@@ -31,6 +31,14 @@ export PRD=/path_to_root_dir/
 # this will determine the name of your subject in the final directory
 export SUBJ_ID=name_subj
+# Matlab path if you have it
+# export MATLAB=/path_to_matlab/
+export MATLAB=$(which matlab)
+# if you don't have matlab, download and install the MCR (linux 64 bits) here:
+# http://www.mathworks.com/products/compiler/mcr/index.html
+# and uncomment the following line
+# export MCR=/path_to_matlab_runtime_compiler/
+# export MCR=/usr/local/MATLAB/MATLAB_Runtime/v93
 #### Standard options
@@ -83,14 +91,13 @@ export REGISTRATION="regular"
 export REGION_MAPPING_CORR="0.42"
 # for computing subconnectivity
-# if you want subdivided parcellations, you can a list of how many
-# regions you want for each parecellations.
-# Subparcellations will be based on subdiving the existing parcellation
-# in more regions, with the constraints of minimizing the difference between
-# subregion sizes across all subregions
+# if you want subdivided parcellations, you can set the following value
+# according to the following table
+# K:               0   1   2   3    4    5
+# Number of Nodes: 70 140 280 560 1120 2240
 # default: ""
-# Needs to be a list of even integers
-export N_SUBREGIONS_LIST="100 200"
+# Needs to be a list of integers
+export K_LIST="0 1 2 3 4 5"
 # number of tracks used in the tractography step.
# default: 10.000.000 diff --git a/main_CH.sh b/main_CH.sh new file mode 100755 index 0000000..9624ccd --- /dev/null +++ b/main_CH.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +source config_CH.sh + +##################################### +## Convert tracks into ascii files ## +##################################### +if [ ! -d $PRD/connectivity/tmp_ascii_tck ]; then + mkdir -p $PRD/connectivity/tmp_ascii_tck + echo "exporting tracts in ascii... this can take a while" + tckconvert $PRD/connectivity/whole_brain_post.tck \ + $PRD/connectivity/tmp_ascii_tck/output-[].txt \ + -scanner2voxel $PRD/connectivity/brain_2_diff.nii.gz \ + -nthreads "$NB_THREADS" +fi + +####################################### +## Run Connectome Harmonics pipeline ## +####################################### +echo "Run Pipeline matlab routine..." +$MATLAB -r "run CH_pipeline_integrated.m; quit;" -nodesktop diff --git a/main_surface.sh b/main_surface.sh index 6cff888..1c8f1b9 100755 --- a/main_surface.sh +++ b/main_surface.sh @@ -13,6 +13,8 @@ # TODO: add the missing mrviews # TOCHECK: test/retest +source config.sh + #### Checks and preset variables # import and check config @@ -62,6 +64,11 @@ if [ -z "$SUBJ_ID" ]; then exit 1 fi +if [ -z "$MATLAB" ] || [ -z "$MCR" ]; then + echo "Matlab ou MCR path missing" + # exit 1 # not mandatory if K_LIST="" +fi + if [ -z "$SUBJECTS_DIR" ]; then echo "you have to set the SUBJECTS_DIR environnement variable for FreeSurfer" >> "$PRD"/log_processing_parameters.txt exit 1 @@ -130,11 +137,11 @@ else fi # TODO: check if list of integers -if [ -z "$N_SUBREGIONS_LIST" ]; then - echo "set N_SUBREGIONS_LIST parameter to empty" | tee -a "$PRD"/log_processing_parameters.txt +if [ -z "$K_LIST" ]; then + echo "set K_LIST parameter to empty" | tee -a "$PRD"/log_processing_parameters.txt K_LIST="" else - echo "N_SUBREGIONS_LIST parameter is "$N_SUBREGIONS_LIST"" | tee -a "$PRD"/log_processing_parameters.txt + echo "K_LIST parameter is "$K_LIST"" | tee -a "$PRD"/log_processing_parameters.txt fi @@ -224,10 +231,10 @@ view_step=0 ######## HCP pre_scripts if [ "$HCP" = "yes" ]; then - #if [ ! -d "$FS"/"$SUBJ_ID"/ ]; then + if [ ! -d "$FS"/"$SUBJ_ID"/ ]; then echo "running HCP pre_scripts" bash util/HCP_pre_scripts.sh - #fi + fi fi @@ -244,6 +251,7 @@ if [ ! -d "$FS"/"$SUBJ_ID" ] ; then fi ###################################### left hemisphere +### PIAL SURFACE ### # export pial into text file mkdir -p "$PRD"/surface if [ ! -f "$PRD"/surface/lh.pial.asc ]; then @@ -253,12 +261,14 @@ if [ ! -f "$PRD"/surface/lh.pial.asc ]; then mris_info "$FS"/"$SUBJ_ID"/surf/lh.pial >& "$PRD"/surface/lhinfo.txt fi + # triangles and vertices high if [ ! -f "$PRD"/surface/lh_vertices_high.txt ]; then echo "extracting left vertices and triangles" python util/extract_high.py lh fi + # decimation using remesher if [ ! -f $PRD/surface/lh_vertices_low.txt ]; then echo "left decimation using remesher" @@ -284,7 +294,49 @@ if [ ! -f "$PRD"/surface/lh_region_mapping_low.txt ]; then python util/check_region_mapping.py lh fi +### WM SURFACE ### +# export white matter surface into text file +mkdir -p $PRD/surface +if [ ! -f $PRD/surface/lh.white.asc ]; then + echo "importing left white matter surfaces from freesurfer" + mris_convert "$FS"/"$SUBJ_ID"/surf/lh.white "$PRD"/surface/lh.white.asc +fi + +# triangles and vertices high +if [ ! -f $PRD/surface/lh_white_vertices_high.txt ]; then + echo "extracting left vertices and triangles for white matter surface" + python util/extract_white_high.py lh +fi + +# decimation using remesher +if [ ! 
-f $PRD/surface/lh_white_vertices_low.txt ]; then + echo "left white matter decimation using remesher" + # -> to mesh + python util/txt2off.py $PRD/surface/lh_white_vertices_high.txt $PRD/surface/lh_white_triangles_high.txt $PRD/surface/lh_white_high.off + # decimation + ./remesher/cmdremesher/cmdremesher $PRD/surface/lh_white_high.off $PRD/surface/lh_white_low.off + # export to list vertices triangles + python util/off2txt.py $PRD/surface/lh_white_low.off $PRD/surface/lh_white_vertices_low.txt $PRD/surface/lh_white_triangles_low.txt +fi + +# create the left region mapping +if [ ! -f $PRD/surface/lh_white_region_mapping_low_not_corrected.txt ]; then + echo "generating the left white matter region mapping on the decimated white matter surface" + python util/region_mapping_white.py lh +fi + +# correct +if [ ! -f $PRD/surface/lh_white_region_mapping_low.txt ]; then + echo "correct the left white matter region mapping" + python util/correct_region_mapping_white.py lh + echo "check left white matter region mapping" + python util/check_region_mapping_white.py lh +fi + + + ###################################### right hemisphere +### PIAL SURAFCE ### # export pial into text file if [ ! -f "$PRD"/surface/rh.pial.asc ]; then echo "importing right pial surface from freesurfer" @@ -323,6 +375,47 @@ if [ ! -f "$PRD"/surface/rh_region_mapping_low.txt ]; then echo "check right region mapping" python util/check_region_mapping.py rh fi + +### WM SURAFCE ### +# export white matter surface into text file +mkdir -p $PRD/surface +if [ ! -f $PRD/surface/rh.white.asc ]; then + echo "importing right white matter surfaces from freesurfer" + mris_convert "$FS"/"$SUBJ_ID"/surf/rh.white "$PRD"/surface/rh.white.asc +fi + +# triangles and vertices high +if [ ! -f $PRD/surface/rh_white_vertices_high.txt ]; then + echo "extracting right vertices and triangles for white matter surface" + python util/extract_white_high.py rh +fi + +# decimation using remesher +if [ ! -f $PRD/surface/rh_white_vertices_low.txt ]; then + echo "right white matter decimation using remesher" + # -> to mesh + python util/txt2off.py $PRD/surface/rh_white_vertices_high.txt $PRD/surface/rh_white_triangles_high.txt $PRD/surface/rh_white_high.off + # decimation + ./remesher/cmdremesher/cmdremesher $PRD/surface/rh_white_high.off $PRD/surface/rh_white_low.off + # export to list vertices triangles + python util/off2txt.py $PRD/surface/rh_white_low.off $PRD/surface/rh_white_vertices_low.txt $PRD/surface/rh_white_triangles_low.txt +fi + +# create the right region mapping +if [ ! -f $PRD/surface/rh_white_region_mapping_low_not_corrected.txt ]; then + echo "generating the right white matter region mapping on the decimated white matter surface" + python util/region_mapping_white.py rh +fi + +# correct +if [ ! -f $PRD/surface/rh_white_region_mapping_low.txt ]; then + echo "correct the right white matter region mapping" + python util/correct_region_mapping_white.py rh + echo "check right white matter region mapping" + python util/check_region_mapping_white.py rh +fi + + ###################################### both hemisphere # prepare final directory mkdir -p $PRD/$SUBJ_ID @@ -355,7 +448,6 @@ fi mkdir -p $PRD/connectivity mkdir -p $PRD/$SUBJ_ID/connectivity - ## preprocessing # See: http://mrtrix.readthedocs.io/en/0.3.16/workflows/DWI_preprocessing_for_quantitative_analysis.html @@ -366,10 +458,11 @@ if [ ! 
-f "$PRD"/connectivity/predwi.mif ]; then i_im=1 echo "generate dwi mif file" echo "if asked, please select a series of images by typing a number" - mrconvert $PRD/data/DWI/ $PRD/connectivity/predwi_"$i_im".mif \ - -export_pe_table $PRD/connectivity/pe_table \ - -export_grad_mrtrix $PRD/connectivity/bvecs_bvals_init \ + mrconvert $PRD/data/DWI/data.nii.gz $PRD/connectivity/predwi_"$i_im".mif \ + -fslgrad $PRD/data/DWI/bvecs.bvec $PRD/data/DWI/bvals.bval \ + -export_grad_mrtrix $PRD/connectivity/bvecs_bvals_init \ -datatype float32 -stride 0,0,0,1 -force -nthreads "$NB_THREADS" +# -export_pe_table $PRD/connectivity/pe_table \ cp $PRD/connectivity/predwi_1.mif $PRD/connectivity/predwi.mif if [ "$FORCE" = "no" ]; then echo "Do you want to add another image serie (different phase encoding)? [y, n]" @@ -433,7 +526,8 @@ if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$ $PRD/connectivity/noise_res.mif fi -# topup/eddy corrections + +# topup/eddy corrections if [ ! -f "$PRD"/connectivity/predwi_denoised_preproc.mif ]; then view_step=1 if [ "$TOPUP" = "eddy_correct" ]; then @@ -443,7 +537,9 @@ if [ ! -f "$PRD"/connectivity/predwi_denoised_preproc.mif ]; then dwipreproc $PRD/connectivity/predwi_denoised.mif \ $PRD/connectivity/predwi_denoised_preproc.mif \ -export_grad_mrtrix $PRD/connectivity/bvecs_bvals_final \ - -rpe_header -force -nthreads "$NB_THREADS" + -rpe_none -pe_dir $PE -force \ + -nthreads "$NB_THREADS" +#-rpe_header -cuda -force -nthreads "$NB_THREADS" else # no topup/eddy echo "no topup/eddy applied" mrconvert $PRD/connectivity/predwi_denoised.mif \ @@ -452,6 +548,7 @@ if [ ! -f "$PRD"/connectivity/predwi_denoised_preproc.mif ]; then -force -nthreads "$NB_THREADS" fi fi + # check preproc files if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$DISPLAY" ]; then echo "check preprocessed mif file (no topup/no eddy)" @@ -518,6 +615,7 @@ fi if [ ! -f "$PRD"/connectivity/dwi.mif ]; then native_voxelsize=$(mrinfo $PRD/connectivity/mask_native.mif -spacing \ | cut -f 1 -d " " | xargs printf "%.3f") + upsampling=$(echo ""$native_voxelsize">1.25" | bc) if [ "$upsampling" = 1 ]; then echo "upsampling dwi" @@ -652,6 +750,7 @@ if [ ! -f $PRD/connectivity/aparc+aseg_reorient.nii.gz ]; then "$FSL"fslreorient2std $PRD/connectivity/aparc+aseg.nii.gz \ $PRD/connectivity/aparc+aseg_reorient.nii.gz fi + # check parcellation to brain.mgz if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$DISPLAY" ]; then # TODO: mrview discrete colour scheme? @@ -718,6 +817,7 @@ if [ ! -f "$PRD"/connectivity/brain_2_diff.nii.gz ]; then -linear $PRD/connectivity/diffusion_2_struct_mrtrix.txt \ -inverse -force fi + # check brain and parcellation to diff if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$DISPLAY" ]; then echo "check parcellation registration to diffusion space" @@ -800,6 +900,7 @@ else -nthreads "$NB_THREADS" fi fi + if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$DISPLAY" ]; then echo "check ODF image" view_step=0 @@ -808,6 +909,7 @@ if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$ -overlay.opacity 0.5 fi + # Fibre orientation distribution estimation if [ ! -f "$PRD"/connectivity/wm_CSD.mif ]; then # Both for multishell and single shell since we use dhollander in the @@ -839,6 +941,7 @@ if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$ fi + + # tractography if [ !
-f "$PRD"/connectivity/whole_brain.tck ]; then if [ "$SIFT" = "sift" ]; then @@ -900,6 +1003,10 @@ if [ ! -f "$PRD"/connectivity/whole_brain.tck ]; then fi fi + + + + # postprocessing if [ ! -f "$PRD"/connectivity/whole_brain_post.tck -a ! -h "$PRD"/connectivity/whole_brain_post.tck ]; then echo "$PRD"/connectivity/whole_brain_post.tck @@ -931,7 +1038,7 @@ if [ ! -f "$PRD"/connectivity/whole_brain_post.tck -a ! -h "$PRD"/connectivity/w echo "using act" tcksift2 $PRD/connectivity/whole_brain.tck \ $PRD/connectivity/wm_CSD.mif \ - $PRD/connectivity/streamline_weights.csv\ + $PRD/connectivity/streamline_weights.csv \ -act $PRD/connectivity/act.mif \ -out_mu $PRD/connectivity/mu.txt \ -out_coeffs $PRD/connectivity/streamline_coeffs.csv \ @@ -951,6 +1058,8 @@ if [ ! -f "$PRD"/connectivity/whole_brain_post.tck -a ! -h "$PRD"/connectivity/w fi fi + + ## now compute connectivity and length matrix if [ ! -f "$PRD"/connectivity/aparcaseg_2_diff_"$ASEG".mif ]; then echo "compute parcellation labels" @@ -974,6 +1083,8 @@ if [ ! -f "$PRD"/connectivity/aparcaseg_2_diff_"$ASEG".mif ]; then fi fi + + if [ ! -f "$PRD"/connectivity/weights.csv ]; then echo "compute connectivity matrix weights" if [ "$SIFT" = "sift2" ]; then @@ -993,6 +1104,9 @@ if [ ! -f "$PRD"/connectivity/weights.csv ]; then fi fi + + + if [ ! -f "$PRD"/connectivity/tract_lengths.csv ]; then echo "compute connectivity matrix edge lengths" view_step=1 @@ -1033,6 +1147,7 @@ if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$ -connectome.load $PRD/connectivity/weights.csv fi + # view tractogram and tdi if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$DISPLAY" ]; then echo "view tractogram and tdi image" @@ -1069,11 +1184,13 @@ if [ "$view_step" = 1 -a "$CHECK" = "yes" ] || [ "$CHECK" = "force" ] && [ -n "$ fi mrview $PRD/connectivity/aparcaseg_2_diff_$ASEG.mif \ -overlay.load $PRD/connectivity/whole_brain_post_tdi.mif \ - -overlay.opacity 0.5 -overlay.interpolation_off \ + -overlay.opacity 0.5 -overlay.interpolation 0 \ -tractography.load $PRD/connectivity/whole_brain_post_decimated.tck +# -overlay.opacity 0.5 -overlay.interpolation_off \ fi + # Compute other files # we do not compute hemisphere # subcortical is already done @@ -1084,6 +1201,7 @@ if [ ! -f "$PRD"/"$SUBJ_ID"/connectivity/cortical.txt ]; then python util/compute_connectivity_files.py fi + # zip to put in final format pushd . > /dev/null cd $PRD/$SUBJ_ID/connectivity > /dev/null @@ -1092,64 +1210,70 @@ zip $PRD/$SUBJ_ID/connectivity.zip areas.txt average_orientations.txt \ popd > /dev/null + ################### subparcellations # compute sub parcellations connectivity if asked -if [ -n "$N_SUBREGIONS_LIST" ]; then - for N_SUBREGIONS in $N_SUBREGIONS_LIST; do - export N_SUBREGIONS=$N_SUBREGIONS - mkdir -p $PRD/$SUBJ_ID/connectivity_"$N_SUBREGIONS" - if [ ! -f $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".nii.gz ]; then - echo "compute subparcellations for $N_SUBREGIONS regions" - python util/subparcel.py $N_SUBREGIONS +if [ -n "$K_LIST" ]; then + for K in $K_LIST; do + export curr_K=$(( 2**K )) + mkdir -p $PRD/$SUBJ_ID/connectivity_"$curr_K" + if [ ! -f $PRD/connectivity/aparcaseg_2_diff_"$curr_K".nii.gz ]; then + echo "compute subparcellations for $curr_K" + if [ -n "$MATLAB" ]; then + $MATLAB -r "run subparcel.m; quit;" -nodesktop -nodisplay + else + sh util/run_subparcel.sh $MCR + fi + gzip $PRD/connectivity/aparcaseg_2_diff_"$curr_K".nii fi - if [ ! 
-f $PRD/$SUBJ_ID/region_mapping_"$N_SUBREGIONS".txt ]; then - echo "generate region mapping for subparcellation "$N_SUBREGIONS"" + if [ ! -f $PRD/$SUBJ_ID/region_mapping_"$curr_K".txt ]; then + echo "generate region mapping for subparcellation "$curr_K"" python util/region_mapping_other_parcellations.py fi - if [ ! -f $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".mif ]; then - mrconvert $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".nii.gz \ - $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".mif \ + if [ ! -f $PRD/connectivity/aparcaseg_2_diff_"$curr_K".mif ]; then + mrconvert $PRD/connectivity/aparcaseg_2_diff_"$curr_K".nii.gz \ + $PRD/connectivity/aparcaseg_2_diff_"$curr_K".mif \ -datatype float32 -force fi - if [ ! -f $PRD/connectivity/weights_"$N_SUBREGIONS".csv ]; then - echo "compute connectivity matrix using act for subparcellation "$N_SUBREGIONS"" + if [ ! -f $PRD/connectivity/weights_"$curr_K".csv ]; then + echo "compute connectivity matrix using act for subparcellation "$curr_K"" if [ "$SIFT" = "sift2" ]; then # -tck_weights_in flag only needed for sift2 but not for sift/no processing tck2connectome $PRD/connectivity/whole_brain_post.tck \ - $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".mif \ - $PRD/connectivity/weights_"$N_SUBREGIONS".csv -assignment_radial_search 2 \ - -out_assignments $PRD/connectivity/edges_2_nodes_"$N_SUBREGIONS".csv \ + $PRD/connectivity/aparcaseg_2_diff_"$curr_K".mif \ + $PRD/connectivity/weights_"$curr_K".csv -assignment_radial_search 2 \ + -out_assignments $PRD/connectivity/edges_2_nodes_"$curr_K".csv \ -tck_weights_in $PRD/connectivity/streamline_weights.csv \ -force -nthreads "$NB_THREADS" else tck2connectome $PRD/connectivity/whole_brain_post.tck \ - $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".mif \ - $PRD/connectivity/weights_"$N_SUBREGIONS".csv -assignment_radial_search 2 \ - -out_assignments $PRD/connectivity/edges_2_nodes_"$N_SUBREGIONS".csv \ + $PRD/connectivity/aparcaseg_2_diff_"$curr_K".mif \ + $PRD/connectivity/weights_"$curr_K".csv -assignment_radial_search 2 \ + -out_assignments $PRD/connectivity/edges_2_nodes_"$curr_K".csv \ -force -nthreads "$NB_THREADS" fi fi - if [ ! -f "$PRD"/connectivity/tract_lengths_"$N_SUBREGIONS".csv ]; then - echo "compute connectivity matrix edge lengths subparcellation "$N_SUBREGIONS"" + if [ ! -f "$PRD"/connectivity/tract_lengths_"$curr_K".csv ]; then + echo "compute connectivity matrix edge lengths subparcellation "$curr_K"" view_step=1 # mean length result: weight by the length, then average # see: http://community.mrtrix.org/t/tck2connectome-edge-statistic-sift2-questions/1059/2 # Not applying sift2, as here the mean is \ # sum(streamline length * streamline weight)/no streamlines, does not make sense tck2connectome $PRD/connectivity/whole_brain_post.tck \ - $PRD/connectivity/aparcaseg_2_diff_"$N_SUBREGIONS".mif \ - $PRD/connectivity/tract_lengths_"$N_SUBREGIONS".csv \ + $PRD/connectivity/aparcaseg_2_diff_"$curr_K".mif \ + $PRD/connectivity/tract_lengths_"$curr_K".csv \ -assignment_radial_search 2 -zero_diagonal -scale_length \ -stat_edge mean -force -nthreads "$NB_THREADS" #fi fi - if [ ! -f "$PRD"/"$SUBJ_ID"/connectivity_"$N_SUBREGIONS"/weights.txt ]; then - echo "generate files for TVB subparcellation "$N_SUBREGIONS"" - python util/compute_connectivity_sub.py $PRD/connectivity/weights_"$N_SUBREGIONS".csv $PRD/connectivity/tract_lengths_"$N_SUBREGIONS".csv $PRD/$SUBJ_ID/connectivity_"$N_SUBREGIONS"/weights.txt $PRD/$SUBJ_ID/connectivity_"$N_SUBREGIONS"/tract_lengths.txt + if [ ! 
-f "$PRD"/"$SUBJ_ID"/connectivity_"$curr_K"/weights.txt ]; then + echo "generate files for TVB subparcellation "$curr_K"" + python util/compute_connectivity_sub.py $PRD/connectivity/weights_"$curr_K".csv $PRD/connectivity/tract_lengths_"$curr_K".csv $PRD/$SUBJ_ID/connectivity_"$curr_K"/weights.txt $PRD/$SUBJ_ID/connectivity_"$curr_K"/tract_lengths.txt fi pushd . > /dev/null - cd $PRD/$SUBJ_ID/connectivity_"$N_SUBREGIONS" > /dev/null - zip $PRD/$SUBJ_ID/connectivity_"$N_SUBREGIONS".zip weights.txt tract_lengths.txt centres.txt average_orientations.txt -q + cd $PRD/$SUBJ_ID/connectivity_"$curr_K" > /dev/null + zip $PRD/$SUBJ_ID/connectivity_"$curr_K".zip weights.txt tract_lengths.txt centres.txt average_orientations.txt -q popd > /dev/null done fi @@ -1233,3 +1357,5 @@ if [ "$FORWARD_MODEL" = "yes" ]; then fi echo "SUCCESS" exit + +END_COMMENT diff --git a/util/check_region_mapping_white.py b/util/check_region_mapping_white.py new file mode 100644 index 0000000..5c70786 --- /dev/null +++ b/util/check_region_mapping_white.py @@ -0,0 +1,165 @@ +import os +import sys +PRD = os.environ['PRD'] +CHECK = os.environ['CHECK'] +if "DISPLAY" in os.environ: + DISPLAY = os.environ['DISPLAY'] +else: + DISPLAY = "" +region_mapping_corr = float(os.environ['REGION_MAPPING_CORR']) +os.chdir(os.path.join(PRD, 'surface')) +from copy import deepcopy +from pylab import * +from mpl_toolkits.mplot3d import Axes3D +from collections import Counter + +def breadth_first_search(iposi, itrian, ilab): + queue = [iposi] + V = [iposi] + while len(queue) > 0: + iQ = queue.pop() + iedges = list(itrian[np.argwhere(itrian==iQ)[:,0]].flatten()) + while len(iedges)>0: + ineig = iedges.pop() + if ineig not in V and texture[ineig]==ilab: + V.append(ineig) + queue.append(ineig) + return V + +def calculate_connected(texture, vert, trian): + "find if the regions are connected components using breadth-first search" + labels = np.unique(texture) + res = [] + for ilab in labels: + ipos = np.nonzero(texture==ilab) + ivert = vert[ipos] + itrian=[] + for itri in np.nonzero(texture==ilab)[0].tolist(): + itrian.extend(trian[np.nonzero(trian==itri)[0]]) + itrian = np.array(itrian).astype('int') + V = breadth_first_search(ipos[0][0], itrian, ilab) + res.append((ilab, ivert.shape[0]-len(V))) + return res + +def find_both_components(texture, vert, trian, ilab): + "find the two subgraphs" + ipos = np.nonzero(texture==ilab) + ivert = vert[ipos] + itrian=[] + for itri in ipos[0].tolist(): + itrian.extend(trian[np.nonzero(trian==itri)[0]]) + itrian = np.array(itrian).astype('int') + # first region + V1 = breadth_first_search(ipos[0][0], itrian, ilab) + # second region + istart = 1 + while ipos[0][istart] in V1: + istart += 1 + V2 = breadth_first_search(ipos[0][istart], itrian, ilab) + return (V1, V2) + +def correct_sub_region(texture, trian, Vw): + "correct the region mapping for the chosen component" + new_texture = copy(texture) + icount = 0 + while len(Vw)>0: + iVw = Vw.pop() + itrian = trian[np.nonzero(trian==iVw)[0]].flatten().astype('int').tolist() + ir = list(filter(lambda x : new_texture[x] != new_texture[iVw], itrian)) + if len(ir)>0: + new_texture[iVw] = new_texture[Counter(ir).most_common(1)[0][0]] + else: + if icount<50: + Vw.insert(0, iVw) + icount +=1 + else: + # TODO: good error message + print('error in correction') + import pdb; pdb.set_trace() + return new_texture + +def check_region_mapping(texture, vert, trian, ilab): + "drawing the region" + ipos = np.nonzero(texture==ilab) + itrian=[] + for itri in ipos[0].tolist(): +
itrian.extend(trian[np.nonzero(trian==itri)[0]]) + itrian = np.array(itrian).astype('int') + bool_itrian = np.in1d(itrian, ipos[0]).reshape(itrian.shape[0], 3) + itrian[np.nonzero(bool_itrian == False)] = 0 + citri = vstack([vstack([itrian[:,0], itrian[:,1]]).T, vstack([itrian[:,1],itrian[:,2]]).T, vstack([itrian[:,2],itrian[:,0]]).T]) + bcitri = (citri!=0).sum(1) + valp = citri[bcitri==2] + fig = figure(figsize=(15, 15)) + fig.suptitle('region ' + str(int(ilab))) + ax = fig.add_subplot(111, projection='3d') + xlabel('x') + ylabel('y') + for iv in np.arange(valp.shape[0]): + ax.plot(vert[valp[iv], 0], vert[valp[iv], 1], vert[valp[iv], 2]) + # old function + # xitrians = vert[np.hstack((itrian, itrian[:,0][:, newaxis])), 0] + # yitrians = vert[np.hstack((itrian, itrian[:,0][:, newaxis])), 1] + # zitrians = vert[np.hstack((itrian, itrian[:,0][:, newaxis])), 2] + # fig = figure(figsize=(15, 15)) + # fig.suptitle('region ' + int(ilab)) + # ax = fig.add_subplot(111, projection='3d') + # for iv in range(xitrians.shape[0]): + # ax.plot(xitrians[iv], yitrians[iv], zitrians[iv], alpha=0.4) + # ax.scatter(vert[ipos, 0], vert[ipos, 1], vert[ipos, 2], c='b', s=45) + show() + +if __name__ == '__main__': + + rl = sys.argv[1] + + vert = loadtxt(rl+'_white_vertices_low.txt') + trian = loadtxt(rl+'_white_triangles_low.txt') + texture = loadtxt(rl + '_white_region_mapping_low.txt') + + res = np.array(calculate_connected(texture, vert, trian)) + wrong_labels = res[np.where(res[:,1]>0.),0][0] + if len(wrong_labels)==0: + print('everything is fine, continuing') + else: + print("WARNING, some regions have several components") + for iwrong in wrong_labels: + # TODO: handle more than two components + (V1, V2) = find_both_components(texture, vert, trian, iwrong) + if CHECK =="yes" and len(DISPLAY)>0: + print("checking") + check_region_mapping(texture, vert, trian, iwrong) + i=0 + while True and i<10: + try: + choice_user = raw_input("""Do you want to get rid of region with: + 1) {0} nodes + 2) {1} nodes + 3) continue the pipeline anyway + (answer: 1, 2, or 3)? 
\n""".format(len(V1), len(V2))) + print("you chose " + choice_user) + + choice_user = int(choice_user) + if choice_user not in [1, 2, 3]: + raise ValueError + break + except ValueError: + print('please choose 1, 2 or 3') + i += 1 + continue + else: + print('too many invalid answers, no correction applied') + else: + print("no check, selecting automatically the smallest component") + choice_user=argmin((len(V1), len(V2)))+1 + if choice_user==3: + print("keep that correction") + savetxt(rl + '_white_region_mapping_low.txt', texture) + elif choice_user==1: + texture = correct_sub_region(texture, trian, V1) + elif choice_user==2: + texture = correct_sub_region(texture, trian, V2) + else: + print('invalid choice, no correction applied') + + savetxt(rl + '_white_region_mapping_low.txt', texture) diff --git a/util/correct_region_mapping_white.py b/util/correct_region_mapping_white.py new file mode 100644 index 0000000..029cd11 --- /dev/null +++ b/util/correct_region_mapping_white.py @@ -0,0 +1,33 @@ +import os +import sys +from copy import deepcopy +from pylab import * +from collections import Counter + +rl = sys.argv[1] +PRD = os.environ['PRD'] +region_mapping_corr = float(os.environ['REGION_MAPPING_CORR']) +os.chdir(os.path.join(PRD, 'surface')) + +texture = loadtxt(rl + '_white_region_mapping_low_not_corrected.txt') +vert = loadtxt(rl + '_white_vertices_low.txt') +trian = loadtxt(rl + '_white_triangles_low.txt') +for _ in range(10): + new_texture = deepcopy(texture) + labels = np.unique(texture) + #import pdb; pdb.set_trace() + for ilab in labels: + iverts = (np.nonzero(texture==ilab)[0]).tolist() + if len(iverts)>0: + for inode in iverts: + iall = trian[np.nonzero(trian==inode)[0]].flatten().tolist() + #import pdb; pdb.set_trace() + ineig = unique(list(filter(lambda x: x!=inode, iall))).astype('int') + ivals = np.array(Counter(texture[ineig]).most_common()).astype('int') + if ivals[np.nonzero(ivals[:,0]==ilab), 1].shape[1]==0: + new_texture[inode] = Counter(texture[ineig]).most_common(1)[0][0] + elif ivals[np.nonzero(ivals[:,0] == ilab), 1][0,0] < region_mapping_corr * len(ineig): + new_texture[inode] = Counter(texture[ineig]).most_common(1)[0][0] + texture = deepcopy(new_texture) + +savetxt(rl + '_white_region_mapping_low.txt', new_texture) diff --git a/util/extract_white_high.py b/util/extract_white_high.py new file mode 100644 index 0000000..a5421f4 --- /dev/null +++ b/util/extract_white_high.py @@ -0,0 +1,29 @@ +import numpy as np +import os +import sys +rl = sys.argv[1] +PRD = os.environ['PRD'] +os.chdir(os.path.join(PRD, 'surface')) + +# read lh_info +with open(rl + 'info.txt', 'r') as f: + lines = f.readlines() + +c_ras_line = lines[32] +ista = c_ras_line.index(' (') + 2 +iend = c_ras_line.index(')\n') +lc_ras = c_ras_line[ista:iend].split(',') +c_ras = np.array(lc_ras).astype('float') + +with open(rl + '.white.asc', 'r') as f: + f.readline() + nb_vert = f.readline().split(' ')[0] + read_data = [[np.double(line.rstrip('\n').split()[0]), + np.double(line.rstrip('\n').split()[1]), + np.double(line.rstrip('\n').split()[2])] for line in f] + +a = np.array(read_data) +vert_high = a[0:int(nb_vert), 0:3] + c_ras +np.savetxt(rl + '_white_vertices_high.txt', vert_high, fmt='%.6f %.6f %.6f') +tri_high = a[int(nb_vert):, 0:3] +np.savetxt(rl +'_white_triangles_high.txt', tri_high, fmt='%d %d %d') diff --git a/util/fs_region.txt b/util/fs_region.txt new file mode 100644 index 0000000..33bb6d4 --- /dev/null +++ b/util/fs_region.txt @@ -0,0 +1,89 @@ +0 Unknown +1 Brain-Stem +2 Left-Cerebellum-Cortex +3 Left-Thalamus-Proper +4
Left-Caudate +5 Left-Putamen +6 Left-Pallidum +7 Left-Hippocampus +8 Left-Amygdala +9 Left-Accumbens-area +10 ctx-lh-unknown +11 ctx-lh-bankssts +12 ctx-lh-caudalanteriorcingulate +13 ctx-lh-caudalmiddlefrontal +14 ctx-lh-cuneus +15 ctx-lh-entorhinal +16 ctx-lh-fusiform +17 ctx-lh-inferiorparietal +18 ctx-lh-inferiortemporal +19 ctx-lh-isthmuscingulate +20 ctx-lh-lateraloccipital +21 ctx-lh-lateralorbitofrontal +22 ctx-lh-lingual +23 ctx-lh-medialorbitofrontal +24 ctx-lh-middletemporal +25 ctx-lh-parahippocampal +26 ctx-lh-paracentral +27 ctx-lh-parsopercularis +28 ctx-lh-parsorbitalis +29 ctx-lh-parstriangularis +30 ctx-lh-pericalcarine +31 ctx-lh-postcentral +32 ctx-lh-posteriorcingulate +33 ctx-lh-precentral +34 ctx-lh-precuneus +35 ctx-lh-rostralanteriorcingulate +36 ctx-lh-rostralmiddlefrontal +37 ctx-lh-superiorfrontal +38 ctx-lh-superiorparietal +39 ctx-lh-superiortemporal +40 ctx-lh-supramarginal +41 ctx-lh-frontalpole +42 ctx-lh-temporalpole +43 ctx-lh-transversetemporal +44 ctx-lh-insula +45 Right-Cerebellum-Cortex +46 Right-Thalamus-Proper +47 Right-Caudate +48 Right-Putamen +49 Right-Pallidum +50 Right-Hippocampus +51 Right-Amygdala +52 Right-Accumbens-area +53 ctx-rh-unknown +54 ctx-rh-bankssts +55 ctx-rh-caudalanteriorcingulate +56 ctx-rh-caudalmiddlefrontal +57 ctx-rh-cuneus +58 ctx-rh-entorhinal +59 ctx-rh-fusiform +60 ctx-rh-inferiorparietal +61 ctx-rh-inferiortemporal +62 ctx-rh-isthmuscingulate +63 ctx-rh-lateraloccipital +64 ctx-rh-lateralorbitofrontal +65 ctx-rh-lingual +66 ctx-rh-medialorbitofrontal +67 ctx-rh-middletemporal +68 ctx-rh-parahippocampal +69 ctx-rh-paracentral +70 ctx-rh-parsopercularis +71 ctx-rh-parsorbitalis +72 ctx-rh-parstriangularis +73 ctx-rh-pericalcarine +74 ctx-rh-postcentral +75 ctx-rh-posteriorcingulate +76 ctx-rh-precentral +77 ctx-rh-precuneus +78 ctx-rh-rostralanteriorcingulate +79 ctx-rh-rostralmiddlefrontal +80 ctx-rh-superiorfrontal +81 ctx-rh-superiorparietal +82 ctx-rh-superiortemporal +83 ctx-rh-supramarginal +84 ctx-rh-frontalpole +85 ctx-rh-temporalpole +86 ctx-rh-transversetemporal +87 ctx-rh-insula + diff --git a/util/lh_ref_table.txt b/util/lh_ref_table.txt new file mode 100644 index 0000000..fcaa48b --- /dev/null +++ b/util/lh_ref_table.txt @@ -0,0 +1,46 @@ +0 0 0 0 0 0 +16 119 159 176 1 11575159 +8 230 148 34 2 2266342 +10 0 118 14 3 947712 +11 122 186 220 4 14465658 +12 236 13 176 5 11537900 +13 12 48 255 6 16723980 +17 220 216 20 7 1366236 +18 103 255 255 8 16777063 +26 255 165 0 9 42495 +1000 25 5 25 10 1639705 +1001 25 100 40 11 2647065 +1002 125 100 160 12 10511485 +1003 100 25 0 13 6500 +1005 220 20 100 14 6558940 +1006 220 20 10 15 660700 +1007 180 220 140 16 9231540 +1008 220 60 220 17 14433500 +1009 180 40 120 18 7874740 +1010 140 20 140 19 9180300 +1011 20 30 140 20 9182740 +1012 35 75 50 21 3296035 +1013 225 140 140 22 9211105 +1014 200 35 75 23 4924360 +1015 160 100 50 24 3302560 +1016 20 220 60 25 3988500 +1017 60 220 60 26 3988540 +1018 220 180 140 27 9221340 +1019 20 100 50 28 3302420 +1020 220 60 20 29 1326300 +1021 120 100 60 30 3957880 +1022 220 20 20 31 1316060 +1023 220 180 220 32 14464220 +1024 60 20 220 33 14423100 +1025 160 140 180 34 11832480 +1026 80 20 140 35 9180240 +1027 75 50 125 36 8204875 +1028 20 220 160 37 10542100 +1029 20 180 140 38 9221140 +1030 140 220 220 39 14474380 +1031 80 160 20 40 1351760 +1032 100 0 100 41 6553700 +1033 70 70 70 42 11146310 +1034 150 150 200 43 13145750 +1035 255 192 32 44 2146559 + diff --git a/util/region_mapping_white.py b/util/region_mapping_white.py new file 
mode 100644 index 0000000..6d65637 --- /dev/null +++ b/util/region_mapping_white.py @@ -0,0 +1,113 @@ +import numpy as np +import os +import os.path as op +import sys + +rl = sys.argv[1] +PRD = os.environ['PRD'] +FS = os.environ['FS'] +SUBJ_ID = os.environ['SUBJ_ID'] +PARCEL = os.environ['PARCEL'] + +def read_annot(fname): + + """Read a Freesurfer annotation from a .annot file. + Note : Copied from nibabel + Parameters + ---------- + fname : str + Path to annotation file + Returns + ------- + annot : numpy array, shape=(n_verts) + Annotation id at each vertex + ctab : numpy array, shape=(n_entries, 5) + RGBA + label id colortable array + names : list of str + List of region names as stored in the annot file + """ + if not op.isfile(fname): + dir_name = op.split(fname)[0] + if not op.isdir(dir_name): + raise IOError('Directory for annotation does not exist: %s', + fname) + cands = os.listdir(dir_name) + cands = [c for c in cands if '.annot' in c] + if len(cands) == 0: + raise IOError('No such file %s, no candidate parcellations ' + 'found in directory' % fname) + else: + raise IOError('No such file %s, candidate parcellations in ' + 'that directory: %s' % (fname, ', '.join(cands))) + with open(fname, "rb") as fid: + n_verts = np.fromfile(fid, '>i4', 1)[0] + data = np.fromfile(fid, '>i4', n_verts * 2).reshape(n_verts, 2) + annot = data[data[:, 0], 1] + ctab_exists = np.fromfile(fid, '>i4', 1)[0] + if not ctab_exists: + raise Exception('Color table not found in annotation file') + n_entries = np.fromfile(fid, '>i4', 1)[0] + if n_entries > 0: + length = np.fromfile(fid, '>i4', 1)[0] + orig_tab = np.fromfile(fid, '>c', length) + orig_tab = orig_tab[:-1] + names = list() + ctab = np.zeros((n_entries, 5), np.int) + for i in range(n_entries): + name_length = np.fromfile(fid, '>i4', 1)[0] + name = np.fromfile(fid, "|S%d" % name_length, 1)[0] + names.append(name) + ctab[i, :4] = np.fromfile(fid, '>i4', 4) + ctab[i, 4] = (ctab[i, 0] + ctab[i, 1] * (2 ** 8) + + ctab[i, 2] * (2 ** 16) + + ctab[i, 3] * (2 ** 24)) + else: + ctab_version = -n_entries + if ctab_version != 2: + raise Exception('Color table version not supported') + n_entries = np.fromfile(fid, '>i4', 1)[0] + ctab = np.zeros((n_entries, 5), np.int) + length = np.fromfile(fid, '>i4', 1)[0] + np.fromfile(fid, "|S%d" % length, 1) # Orig table path + entries_to_read = np.fromfile(fid, '>i4', 1)[0] + names = list() + for i in range(entries_to_read): + np.fromfile(fid, '>i4', 1) # Structure + name_length = np.fromfile(fid, '>i4', 1)[0] + name = np.fromfile(fid, "|S%d" % name_length, 1)[0] + names.append(name) + ctab[i, :4] = np.fromfile(fid, '>i4', 4) + ctab[i, 4] = (ctab[i, 0] + ctab[i, 1] * (2 ** 8) + + ctab[i, 2] * (2 ** 16)) + # convert to more common alpha value + ctab[:, 3] = 255 - ctab[:, 3] + return annot, ctab, names + + +if PARCEL=='desikan': + L, _, _ = read_annot(os.path.join(FS, SUBJ_ID, 'label', rl + '.aparc.annot')) +elif PARCEL=='destrieux': + L, _, _ = read_annot(os.path.join(FS, SUBJ_ID, 'label', rl + '.aparc.a2009s.annot')) +elif PARCEL=='HCP-MMP': + raise NotImplementedError #TODO volumetric parcellation script + L, _, _ = read_annot(os.path.join('share', rl + '.HCP-MMP1.annot')) +elif PARCEL=='Yeo-7nets': + raise NotImplementedError #TODO volumetric parcellation script + L, _, _ = read_annot(os.path.join('share', rl + '.Yeo_7nets.annot')) +elif PARCEL=='Yeo-17nets': + raise NotImplementedError #TODO volumetric parcellation script + L, _, _ = read_annot(os.path.join('share', rl + '.Yeo_17nets.annot')) +# using the ref table 
instead of the annot to reorder the region indices as we want for the region mapping +ref_table = np.loadtxt(open(os.path.join('share', 'reference_table_' + PARCEL + ".csv"), "rb"), delimiter=",", skiprows=1, usecols=(5,6)) +vl = np.loadtxt(os.path.join(PRD, 'surface', rl + '_white_vertices_low.txt')) # vertices low +vh = np.loadtxt(os.path.join(PRD, 'surface', rl + '_white_vertices_high.txt')) # vertices high +reg_map = [] +for vli in vl: + pos = np.argmin(np.sum(np.abs(vh - vli), 1)) + if rl == 'lh': # colors are the same for left and right hemispheres + find_tab = np.nonzero(ref_table[:, 1] == L[pos])[0][0] + elif rl=='rh': + find_tab = np.nonzero(ref_table[:, 1] == L[pos])[0][-1] + reg_map.append(ref_table[find_tab, 0]) + +np.savetxt(os.path.join(PRD, 'surface', rl + '_white_region_mapping_low_not_corrected.txt'), reg_map) diff --git a/util/rh_ref_table.txt b/util/rh_ref_table.txt new file mode 100644 index 0000000..0410fd2 --- /dev/null +++ b/util/rh_ref_table.txt @@ -0,0 +1,46 @@ +0 0 0 0 0 0 +16 119 159 176 1 11575159 +47 230 148 34 45 2266342 +49 0 118 14 46 947712 +50 122 186 220 47 14465658 +51 236 13 176 48 11537900 +52 13 48 255 49 16723981 +53 220 216 20 50 1366236 +54 103 255 255 51 16777063 +58 255 165 0 52 42495 +2000 25 5 25 53 1639705 +2001 25 100 40 54 2647065 +2002 125 100 160 55 10511485 +2003 100 25 0 56 6500 +2005 220 20 100 57 6558940 +2006 220 20 10 58 660700 +2007 180 220 140 59 9231540 +2008 220 60 220 60 14433500 +2009 180 40 120 61 7874740 +2010 140 20 140 62 9180300 +2011 20 30 140 63 9182740 +2012 35 75 50 64 3296035 +2013 225 140 140 65 9211105 +2014 200 35 75 66 4924360 +2015 160 100 50 67 3302560 +2016 20 220 60 68 3988500 +2017 60 220 60 69 3988540 +2018 220 180 140 70 9221340 +2019 20 100 50 71 3302420 +2020 220 60 20 72 1326300 +2021 120 100 60 73 3957880 +2022 220 20 20 74 1316060 +2023 220 180 220 75 14464220 +2024 60 20 220 76 14423100 +2025 160 140 180 77 11832480 +2026 80 20 140 78 9180240 +2027 75 50 125 79 8204875 +2028 20 220 160 80 10542100 +2029 20 180 140 81 9221140 +2030 140 220 220 82 14474380 +2031 80 160 20 83 1351760 +2032 100 0 100 84 6553700 +2033 70 70 70 85 11146310 +2034 150 150 200 86 13145750 +2035 255 192 32 87 2146559 + From edcdc848b617cc8277d841bedef87c56c9f45951 Mon Sep 17 00:00:00 2001 From: sebnaze Date: Thu, 19 Nov 2020 09:03:41 -0500 Subject: [PATCH 2/2] updated readme with CH package dependencies and matlab_utils folder to put them --- README.md | 10 ++++++++++ matlab_utils/.keep | 0 2 files changed, 10 insertions(+) create mode 100644 matlab_utils/.keep diff --git a/README.md b/README.md index 77529f1..a86e20b 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,16 @@ Naze S., Proix T., Atasoy S., Kozloski J. R. (2020) Robustness of connectome har - To perform: after running `main_surface.sh`, run `main_CH.sh`. - Template connectome harmonics for surfaces *cvs\_avg35\_inMNI152*, *fsaverage4* and *fsaverage5* using the [Gibbs tractography streamlines](https://www.nitrc.org/projects/gibbsconnectome) is now [available on zenodo](https://zenodo.org/record/4027989) + +#### Connectome Harmonics dependencies +Install the following packages in the matlab_utils folder: +- Toolbox graph +- CBrewer colormaps +- Export_fig +- TriangleRayIntersection +- SmoothPatch +- Subtightplot + #### NEW in 0.4: - Dockerfile for easy installation with Docker. - Tests. diff --git a/matlab_utils/.keep b/matlab_utils/.keep new file mode 100644 index 0000000..e69de29
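A minimal sanity check one might run from the repository root after placing the packages listed above in `matlab_utils/` — the function names below are assumptions based on each package's usual entry point, so adjust them to whatever the downloaded archives actually provide:

```matlab
% Check that the Connectome Harmonics dependencies are visible on the MATLAB path.
% Assumed entry points: compute_mesh_laplacian (Toolbox graph), cbrewer, export_fig,
% TriangleRayIntersection, smoothpatch, subtightplot.
addpath(genpath('matlab_utils'));
deps = {'compute_mesh_laplacian', 'cbrewer', 'export_fig', ...
        'TriangleRayIntersection', 'smoothpatch', 'subtightplot'};
for k = 1:numel(deps)
    assert(~isempty(which(deps{k})), 'dependency not found: %s', deps{k});
end
```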