def data_compression(fmri_masked, mask_img, mask_np, output_size):
    """
    Reduce a masked functional dataset to `output_size` features with
    spatially constrained Ward clustering (feature agglomeration).

    fmri_masked : array_like
        A matrix of shape (`N`, `V`) with `N` timepoints and `V` voxels;
        the functional dataset that needs to be reduced.
    mask_img : Nifti1Image
        The brain mask image.
    mask_np : numpy array
        The brain mask as a numpy array, used to build the voxel
        connectivity graph.
    output_size : integer
        The number of clusters that the data should be reduced to.

    Returns a dict with 'data' (the reduced matrix, shape (`N`, `output_size`))
    and 'labels' (the Ward cluster label of each voxel).
    """
    ## Imports used below
    import time
    from nilearn import input_data
    from sklearn.feature_extraction import image
    from sklearn.cluster import FeatureAgglomeration

    datacompressiontime = time.time()
    # Masker built from the mask image; fmri_masked is assumed to already be
    # the masked (timepoints x voxels) matrix, so the masker is not used below.
    nifti_masker = input_data.NiftiMasker(mask_img=mask_img, memory='nilearn_cache',
                                          mask_strategy='background', memory_level=1,
                                          standardize=False)
    # Build the voxel connectivity graph so that Ward only merges
    # spatially adjacent voxels.
    shape = mask_np.shape
    connectivity = image.grid_to_graph(n_x=shape[0], n_y=shape[1],
                                       n_z=shape[2], mask=mask_np)

    # Perform Ward clustering (feature agglomeration) over the voxels.
    start = time.time()
    ward = FeatureAgglomeration(n_clusters=output_size, connectivity=connectivity,
                                linkage='ward')
    ward.fit(fmri_masked)
    #print("Ward agglomeration compressing voxels into clusters: %.2fs" % (time.time() - start))

    labels = ward.labels_

    #print('Extracting reduced dimension data')
    data_reduced = ward.transform(fmri_masked)
    fmri_masked = []  # drop the local reference to the large input array
    #print('Data compression took ', (time.time() - datacompressiontime), ' seconds')
    return {'data': data_reduced, 'labels': labels}
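

# A minimal usage sketch (not part of the original function): the file names
# and the output_size below are hypothetical. It assumes a 4D functional NIfTI
# file and a matching binary brain-mask image; NiftiMasker extracts the
# (timepoints x voxels) matrix that data_compression expects, and the mask's
# get_fdata() array is used to build the connectivity graph.
if __name__ == '__main__':
    import nibabel as nib
    from nilearn import input_data

    func_file = 'sub-01_task-rest_bold.nii.gz'   # hypothetical functional image
    mask_file = 'sub-01_brain_mask.nii.gz'       # hypothetical brain mask

    mask_img = nib.load(mask_file)
    mask_np = mask_img.get_fdata().astype(bool)

    masker = input_data.NiftiMasker(mask_img=mask_img, standardize=False)
    fmri_masked = masker.fit_transform(func_file)   # shape (N timepoints, V voxels)

    compressed = data_compression(fmri_masked, mask_img, mask_np, output_size=800)
    print(compressed['data'].shape)    # (N, 800)
    print(compressed['labels'].shape)  # (V,)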