# @title Install dependencies
!pip install nilearn --quiet

import os
import numpy as np
import matplotlib.pyplot as plt
import nibabel as nib  # neuroimaging I/O library
from nilearn import plotting, image

# @title Figure settings
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
plt.style.use("https://raw.githubusercontent.com/NeuromatchAcademy/course-content/master/nma.mplstyle")

# The download cells will store the data in nested directories starting here:
DATA_DIR = "./fslcourse"
if not os.path.isdir(DATA_DIR):
  os.mkdir(DATA_DIR)

# Description of the two experiments
EXPERIMENTS = {
    'fluency': {
        'TR': 4.2,              # time resolution in seconds
        'ntimes': 106,          # number of time points
        'EVs': ['Gen', 'Shad']  # conditions
    },
    'parametric': {
        'TR': 0.933,
        'ntimes': 1100,
        'EVs': ['WPM_0050', 'WPM_0350', 'WPM_0650', 'WPM_0950', 'WPM_1250']
    }
}

# @title Download and unzip the data in `DATA_DIR`
import requests, tarfile

fname = "fslcourse.tgz"
url = "https://osf.io/syt65/download/"

if not os.path.isfile(fname):
  try:
    r = requests.get(url)
  except requests.ConnectionError:
    print("!!! Failed to download data !!!")
  else:
    if r.status_code != requests.codes.ok:
      print("!!! Failed to download data !!!")
    else:
      print("Downloading data...")
      with open(fname, "wb") as fid:
        fid.write(r.content)
      # unpack the archive
      with tarfile.open(fname) as fzip:
        fzip.extractall(DATA_DIR)
      print("Download completed!")
else:
  print("Data have already been downloaded!")

# @title Helper functions
def load_evs(exp, dir):
  """Load EVs (explanatory variables) for one task experiment.

  Args:
    exp (str) : name of experiment (a key of EXPERIMENTS)
    dir (str) : data directory

  Returns:
    dict mapping each EV name to a binary (0/1) timeseries of length ntimes
  """
  TR = EXPERIMENTS[exp]['TR']
  EVs = []
  for ev in EXPERIMENTS[exp]['EVs']:
    ev_file = os.path.join(dir, exp, "EVs", f"{ev}.txt")
    # each row of the file is [onset, duration, amplitude] for one trial
    ev_array = np.loadtxt(ev_file, ndmin=2, unpack=True)
    ev = dict(zip(["onset", "duration", "amplitude"], ev_array))
    # Determine when each trial starts, rounded down to the nearest frame
    start = np.floor(ev["onset"] / TR).astype(int)
    # Use the trial duration to determine how many frames each trial spans
    duration = np.ceil(ev["duration"] / TR).astype(int)
    # Take the range of frames that correspond to each trial
    frames = [s + np.arange(0, d) for s, d in zip(start, duration)]
    a = np.zeros(EXPERIMENTS[exp]['ntimes'])
    for frame in frames:
      a[frame] = 1
    EVs.append(a)
  return dict(zip(EXPERIMENTS[exp]['EVs'], EVs))


def get_HRF(duration, TR, peak):
  """Really dumb haemodynamic response function (not physiologically plausible).

  It simply ramps up linearly from 0 to the peak and back down again.

  Args:
    duration (float) : total length of the response, in seconds
    TR (float)       : time resolution, in seconds
    peak (float)     : time of the peak, in seconds

  Returns:
    1D array (normalised to sum to 1)
  """
  n = int(np.ceil(duration / TR))
  x = np.linspace(0, duration, n)
  h = np.zeros(n)
  h[x < peak] = x[x < peak] / peak
  h[x >= peak] = (x[x >= peak] - duration) / (peak - duration)
  h = h / np.sum(h)
  return h


def glm(Y, X, C=None, mask=None):
  """Run a general linear model.

  Args:
    Y (2d array)    : time-by-space data matrix
    X (2d array)    : time-by-regressors design matrix
    C (2d array)    : contrasts-by-regressors contrast matrix [default=identity]
    mask (1d array) : spatial mask where the GLM is run [default=everywhere]

  Returns:
    cope (2d array)  : contrast maps (contrasts-by-space)
    tstat (2d array) : t-statistics (contrasts-by-space)
  """
  if C is None:
    C = np.identity(X.shape[1])
  if mask is None:
    mask = np.ones(Y.shape[1])
  # initialise matrices
  beta = np.zeros((X.shape[1], Y.shape[1]))
  cope = np.zeros((C.shape[0], Y.shape[1]))
  tstat = np.zeros_like(cope)
  # solve the GLM with the pseudo-inverse: beta = pinv(X) @ Y
  beta[:, mask > 0] = np.linalg.pinv(X) @ Y[:, mask > 0]
  # apply contrasts
  cope[:, mask > 0] = C @ beta[:, mask > 0]
  # calculate uncertainty (varcope) from the residuals
  r = Y - X @ beta
  dof = X.shape[0] - np.linalg.matrix_rank(X)
  sig2 = np.sum(r**2, axis=0) / dof
  # var(c @ beta) = c @ inv(X'X) @ c' * sigma^2, per contrast and per voxel
  varcope = np.outer(np.diag(C @ np.linalg.inv(X.T @ X) @ C.T), sig2)
  # calculate t-stats
  tstat[:, mask > 0] = cope[:, mask > 0] / np.sqrt(varcope[:, mask > 0])
  return cope, tstat
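# @title (Optional) Sanity-check the GLM helper on synthetic data
# This cell is not part of the original tutorial: it is a minimal sketch that
# simulates a block design for 50 "voxels", injects a known response (beta = 2)
# into the first 20 of them, and checks that `glm` recovers it before we trust
# the helper on real data. All names below (ntime, nvox, X_sim, Y_sim) are
# made up for this illustration.
np.random.seed(0)
ntime, nvox = 200, 50
# binary block design (10 frames on, 10 frames off), convolved with the toy HRF
blocks = np.tile([1] * 10 + [0] * 10, 10)
x = np.convolve(blocks, get_HRF(duration=10, TR=1, peak=3), 'full')[:ntime]
X_sim = np.stack([x - x.mean(), np.ones(ntime)], axis=1)  # regressor + constant
Y_sim = np.random.randn(ntime, nvox)
Y_sim[:, :20] += 2 * X_sim[:, [0]]  # add signal with beta = 2 to the first 20 voxels
cope_sim, t_sim = glm(Y_sim, X_sim)
print(cope_sim[0, :5])                     # estimated betas: should be close to 2
print(np.sort(t_sim[0].argsort()[-20:]))   # top-20 t-stats: should be voxels 0..19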
# Use nibabel to load the data
img = nib.load(os.path.join(DATA_DIR, "fslcourse_data", "fluency", "fmri_data.nii.gz"))

# Get the actual data from the nibabel image object; this returns a numpy array
data = img.get_fdata()

# Understand the dimensions of the data
print(data.shape)  # x-by-y-by-z-by-time

# Load and plot the two EVs
EVs = load_evs('fluency', os.path.join(DATA_DIR, "fslcourse_data"))
plt.figure()
plt.plot(EVs['Gen'], label='Word Generation')
plt.plot(EVs['Shad'], label='Word Shadowing')
plt.legend()
plt.show()

# Convolve the EVs with the HRF
HRF = get_HRF(duration=10, TR=1, peak=3)
n = len(EVs['Gen'])
ev1 = np.convolve(EVs['Gen'], HRF, 'full')[:n]
ev2 = np.convolve(EVs['Shad'], HRF, 'full')[:n]

# Plot the convolved EVs against the originals
plt.figure()
plt.plot(EVs['Gen'])
plt.plot(ev1)
plt.show()

plt.figure()
plt.plot(EVs['Shad'])
plt.plot(ev2)
plt.show()

# Demean the task regressors
ev1 = ev1 - np.mean(ev1)
ev2 = ev2 - np.mean(ev2)

# Append a constant regressor
design_matrix = np.asarray([ev1, ev2, np.ones_like(ev1)]).T
print(design_matrix.shape)

plt.figure()
plt.plot(design_matrix)
plt.show()

# Turn the 4D data array into a 2D time-by-voxels matrix
Y = np.reshape(data, (np.prod(data.shape[:3]), -1)).T
# Create a mask where the data are non-zero
mask = np.sum(Y**2, axis=0) > 0

# Run the GLM; with the default identity contrast, the returned copes
# are simply the betas
beta, t = glm(Y=Y, X=design_matrix, mask=mask)

# Turn the results back into 4D arrays
shape = list(data.shape[:3])
shape.append(beta.shape[0])
beta_r = np.reshape(beta.T, shape)
t_r = np.reshape(t.T, shape)

# Look at the model fit for the best voxel
idx = np.argmax(t[0, :])
print(t[:, idx])

plt.figure()
plt.plot(Y[:, idx])
plt.plot(design_matrix @ beta[:, idx])
plt.show()

# This uses the nilearn package
# Select the Word Generation contrast
tt = t_r[..., 0]
# Turn it into a nilearn image object (t_map avoids shadowing the builtin map)
t_map = image.new_img_like(img, tt, copy_header=True)
# Display it (this assumes the map is in MNI standard space, which it is here)
plotting.plot_stat_map(t_map, threshold=2.5)
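# @title (Optional) A differential contrast: Word Generation > Word Shadowing
# The `C` argument of `glm` was never exercised above (the default identity
# contrast simply returns one map per regressor). This cell is a sketch, not
# part of the original tutorial: it shows how a contrast matrix would be used
# to test Gen > Shad directly, reusing Y, design_matrix and mask from above.
C = np.array([[1, -1, 0]])  # Gen minus Shad; ignore the constant regressor
cope_diff, t_diff = glm(Y=Y, X=design_matrix, C=C, mask=mask)
# reshape the single t-map back to 3D and display it with nilearn as before
t_diff_r = np.reshape(t_diff[0], data.shape[:3])
diff_map = image.new_img_like(img, t_diff_r, copy_header=True)
plotting.plot_stat_map(diff_map, threshold=2.5)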