-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdSPM_localize_postStim.py
173 lines (137 loc) · 7.29 KB
/
dSPM_localize_postStim.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
# Import libraries
import os
import mne
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from mne.minimum_norm import make_inverse_operator, apply_inverse, source_band_induced_power
from mne.time_frequency import tfr_morlet
import multiprocessing as mp
import time
def make_dSPM(subjectID):
    """Localize post-stimulus beta-burst activity for one CamCAN subject.

    For the given subject, select the 55 highest-power post-stimulus beta
    events (15-30 Hz, 0.25-1.25 s after button press), build one short epoch
    per burst, compute a dSPM/MNE inverse solution on the beta band, convert
    induced power to an ERS (log2 active/baseline) map, and save both the
    subject-space and fsaverage-morphed source estimates.

    Parameters
    ----------
    subjectID : str
        CamCAN subject identifier (e.g. 'CC110033'); used to build all
        input/output paths.

    Returns
    -------
    dict or None
        The band -> SourceEstimate dict from source_band_induced_power, or
        None when the subject has too few bursts or output already exists.
    """
    # Localization parameters: beta band and post-stimulus window of interest.
    fmin = 15
    fmax = 30
    startTime = 0.25
    endTime = 1.25
    tmins = [0.0, -0.575]  # tmins[1] defines the half-width of each burst epoch

    # Setup paths and names for files.
    channelName = 'MEG1311'
    dataDir = '/home/timb/camcan/'
    MEGDir = os.path.join(dataDir, 'proc_data/TaskSensorAnalysis_transdef')
    outDir = os.path.join('/media/NAS/lpower/BetaSourceLocalization/postStimData', channelName, subjectID)
    subjectsDir = os.path.join(dataDir, 'subjects/')

    epochFifFilename = 'transdef_transrest_mf2pt2_task_raw_buttonPress_duration=3.4s_cleaned-epo.fif'
    epochFif = os.path.join(MEGDir, subjectID, epochFifFilename)
    spectralEventsCSV = subjectID + '_MEG1311_spectral_events.csv'
    csvFile = '/media/NAS/bbrady/random old results/spectralEvents/' + subjectID + '/' + spectralEventsCSV
    transFif = subjectsDir + 'coreg/sub-' + subjectID + '-trans.fif'
    srcFif = subjectsDir + 'sub-' + subjectID + '/bem/sub-' + subjectID + '-5-src.fif'
    bemFif = subjectsDir + 'sub-' + subjectID + '/bem/sub-' + subjectID + '-5120-bem-sol.fif'
    emptyroomFif = '/media/NAS/lpower/BetaSourceLocalization/emptyroomData/' + subjectID + '/emptyroom_trans-epo.fif'

    # Files to make.
    # BUG FIX: the stcFile name previously contained the typo 'duration=3e.4s',
    # so the saved file never matched testCompleteFile ('duration=3.4s...-lh.stc')
    # and every subject was needlessly reprocessed on each run.
    stcFile = os.path.join(outDir, 'transdef_transrest_mf2pt2_task_raw_buttonPress_duration=3.4s_cleaned-epo_postBetaEvents_dSPM')
    stcMorphFile = os.path.join(outDir, 'transdef_transrest_mf2pt2_task_raw_buttonPress_duration=3.4s_cleaned-epo_postBetaEvents_dSPM_fsaverage')
    testCompleteFile = os.path.join(outDir, 'transdef_transrest_mf2pt2_task_raw_buttonPress_duration=3.4s_cleaned-epo_postBetaEvents_dSPM-lh.stc')

    if not os.path.exists(outDir):
        os.makedirs(outDir)

    if not os.path.exists(testCompleteFile):
        # Read all transient events for subject.
        df = pd.read_csv(csvFile)
        print(df)

        # Events that meet Shin criteria only.
        df1 = df[df['Outlier Event']]
        # Restrict to the frequency range and post-stimulus time window of interest.
        df2 = df1.drop(df1[df1['Peak Frequency'] < fmin].index)
        df3 = df2.drop(df2[df2['Peak Frequency'] > fmax].index)
        df4 = df3.drop(df3[df3['Peak Time'] > endTime].index)
        newDf = df4.drop(df4[df4['Peak Time'] < startTime].index)

        # Keep only the 55 highest-power beta events (based on pre-analysis
        # calculations in R); skip subjects with fewer than 55 bursts.
        # BUG FIX: previously used newDf.size, which counts cells
        # (rows * columns), not rows, so subjects with far fewer than 55
        # bursts slipped through the guard.
        newDf = newDf.sort_values(by='Normalized Peak Power')
        if len(newDf) >= 55:
            newDf = newDf.tail(n=55)
        else:
            print("Not enough bursts to make a map.")
            return

        # Read epochs.
        originalEpochs = mne.read_epochs(epochFif)

        # Re-calculate epochs to have one per spectral event - this is the
        # part that ensures that the map is calculated per burst.
        ## IMPORTANT FOR METHODS DESCRIPTION ##
        numEvents = len(newDf)
        epochList = []
        for e in np.arange(numEvents):
            thisDf = newDf.iloc[e]
            onsetTime = thisDf['Event Onset Time']
            epoch = originalEpochs[thisDf['Trial']]
            # Crop a symmetric window of +/- 0.575 s around the burst onset.
            epochCrop = epoch.crop(onsetTime + tmins[1], onsetTime - tmins[1])
            epochCrop = epochCrop.apply_baseline(baseline=(None, None))
            # Shift times so every burst epoch spans exactly (-0.575, 0.575).
            epochCrop.shift_time(tmins[1], relative=False)
            # Drop epochs truncated by the edge of the original epoch window.
            if (epochCrop.tmin == -0.575 and epochCrop.tmax == 0.575):
                epochList.append(epochCrop)
        epochs = mne.concatenate_epochs(epochList)
        epochs.pick_types(meg=True)

        # Read the empty room recording and compute the noise covariance
        # matrix from it.
        empty_room = mne.read_epochs(emptyroomFif)
        noise_cov = mne.compute_covariance(empty_room, tmin=0, tmax=None)

        # Calculate evoked response (only used Mags for the DICS beamformer
        # so can just use Mags for this).
        evoked = epochs.average().pick('meg')

        # Read source space and build the forward solution.
        src = mne.read_source_spaces(srcFif)
        forward = mne.make_forward_solution(epochs.info, trans=transFif, src=src, bem=bemFif, meg=True, eeg=False)

        # Make MEG inverse operator.
        inverse_operator = make_inverse_operator(evoked.info, forward, noise_cov)

        # Compute a source estimate for the beta band.
        # NOTE(review): despite the script's dSPM name, the inverse method
        # actually applied here is "MNE" — confirm this is intentional.
        bands = dict(beta=[15, 30])
        stc = source_band_induced_power(epochs, inverse_operator, bands, method="MNE", n_cycles=2,
                                        use_fft=False, n_jobs=1)

        # ERS = log2(active / baseline) power; baseline is the first 400
        # samples, active is samples 575-975 (post-burst window).
        baselineData = stc['beta'].data[:, 0:400]
        activeData = stc['beta'].data[:, 575:975]
        ERS = np.log2(activeData / baselineData)

        ERSstc = mne.SourceEstimate(ERS, vertices=stc['beta'].vertices, tmin=stc['beta'].tmin,
                                    tstep=stc['beta'].tstep, subject=stc['beta'].subject)
        ERSband = ERSstc.mean()
        ERSband.save(stcFile)

        # Morph the subject-space map to fsaverage and save it.
        morph = mne.compute_source_morph(ERSband, subject_from='sub-' + subjectID,
                                         subject_to='fsaverage',
                                         subjects_dir=subjectsDir)
        ERSmorph = morph.apply(ERSband)
        ERSmorph.save(stcMorphFile)

        return stc
if __name__ == "__main__":
    # Find subjects to be analysed.
    homeDir = '/media/NAS/lpower/camcan/'
    dataDir = homeDir + 'spectralEvents/task/MEG0221'
    camcanCSV = dataDir + '/spectralEventAnalysis.csv'
    subjectData = pd.read_csv(camcanCSV)

    # Take only subjects with more than 55 epochs.
    subjectData = subjectData[subjectData['numEpochs'] > 55]

    # Require BEM/source-space/coregistration files, and skip subjects whose
    # stc has already been made.
    subjectData = subjectData.drop(subjectData[subjectData['bemExists'] == False].index)
    subjectData = subjectData.drop(subjectData[subjectData['srcExists'] == False].index)
    subjectData = subjectData.drop(subjectData[subjectData['transExists'] == False].index)
    subjectData = subjectData.drop(subjectData[subjectData['PreStim Stc Exists'] == True].index)

    subjectIDs = subjectData['SubjectID'].tolist()
    print(len(subjectIDs))
    print(subjectIDs)

    # Manually excluded subjects (known-bad data).
    # BUG FIX: list.remove(x) raised ValueError and aborted the whole run
    # whenever an excluded subject was not present in the CSV; filtering is
    # robust to absent entries.
    ex_subs = ['CC520395', 'CC222326', 'CC310414', 'CC320568', 'CC320636', 'CC321595', 'CC510534',
               'CC520136', 'CC520745', 'CC520775', 'CC621080', 'CC720304']
    subjectIDs = [s for s in subjectIDs if s not in ex_subs]

    # Set up the parallel task pool to use a quarter of available processors.
    # BUG FIX: the pool was never closed/joined; the context manager
    # guarantees worker cleanup even if a job raises.
    count = int(np.round(mp.cpu_count() * 1 / 4))
    with mp.Pool(processes=count) as pool:
        # Run the jobs.
        pool.map(make_dSPM, subjectIDs)
    #dSPM_stc = make_dSPM('CC110033')