diff --git a/develop/.buildinfo b/develop/.buildinfo new file mode 100644 index 00000000..7c08904a --- /dev/null +++ b/develop/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: f43e356d84e74a6ccecdbab8dd44ba1e +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/develop/.nojekyll b/develop/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/develop/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip b/develop/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip new file mode 100644 index 00000000..8692a92e Binary files /dev/null and b/develop/_downloads/07fcc19ba03226cd3d83d4e40ec44385/auto_examples_python.zip differ diff --git a/develop/_downloads/12812c6275fa4a49792d7f7384f3d252/visual_gonogo_python.zip b/develop/_downloads/12812c6275fa4a49792d7f7384f3d252/visual_gonogo_python.zip new file mode 100644 index 00000000..15cb0ecb Binary files /dev/null and b/develop/_downloads/12812c6275fa4a49792d7f7384f3d252/visual_gonogo_python.zip differ diff --git a/develop/_downloads/18e4b69d6ddc9302425e645c1fec16c3/04r__cueing_group_analysis_winter2019.py b/develop/_downloads/18e4b69d6ddc9302425e645c1fec16c3/04r__cueing_group_analysis_winter2019.py new file mode 100644 index 00000000..84a7795e --- /dev/null +++ b/develop/_downloads/18e4b69d6ddc9302425e645c1fec16c3/04r__cueing_group_analysis_winter2019.py @@ -0,0 +1,326 @@ +""" +Cueing Group Analysis Winter 2019 +=============================== + +""" + +################################################################################################### +# Setup +# ----------------------------- + +# Standard Pythonic imports +import os,sys,glob,numpy as np, pandas as pd +import scipy +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') +from matplotlib import pyplot as plt +import matplotlib.patches as patches 
+ +# MNE functions +from mne import Epochs, find_events, concatenate_raws +from mne.time_frequency import tfr_morlet + +# EEG-Noteooks functions +from eegnb.analysis.analysis_utils import load_data +from eegnb.datasets import fetch_dataset + +# sphinx_gallery_thumbnail_number = 1 + +################################################################################################### +# Load the data +# ----------------------------- + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(cueing_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev') + + +################################################################################################### +# Put the data into MNE Epochs +# ----------------------------- +# +# Fall 2018 +# subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112, +# 202, 203, 204, 205, 207, 208, 209, 210, 211, +# 301, 302, 303, 304, 305, 306, 307, 308, 309] +# +# Winter 2019 +subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110, + 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215, + 1301, 1302, 1313, + 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416] +# +# Both +# subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112, +# 202, 203, 204, 205, 207, 208, 209, 210, 211, +# 301, 302, 303, 304, 305, 306, 307, 308, 309, +# 1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110, +# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215, +# 1301, 1302, 1313, +# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416] +# +# +# placeholders to add to for each subject +diff_out = [] +Ipsi_out = [] +Contra_out = [] +Ipsi_spectra_out = [] +Contra_spectra_out = [] +diff_spectra_out = [] +ERSP_diff_out = [] +ERSP_Ipsi_out = [] +ERSP_Contra_out = [] + 
+frequencies = np.linspace(6, 30, 100, endpoint=True) +wave_cycles = 6 + +# time frequency window for analysis +f_low = 7 # Hz +f_high = 10 +f_diff = f_high-f_low + +t_low = 0 # s +t_high = 1 +t_diff = t_high-t_low + +bad_subs= [6, 7, 13, 26] +really_bad_subs = [11, 12, 19] +sub_count = 0 + + + +for sub in subs: + print(sub) + + sub_count += 1 + + + if (sub_count in really_bad_subs): + rej_thresh_uV = 90 + elif (sub_count in bad_subs): + rej_thresh_uV = 90 + else: + rej_thresh_uV = 90 + + rej_thresh = rej_thresh_uV*1e-6 + + + # Load both sessions + raw = load_data(sub,1, # subject, session + experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016', + data_dir = eegnb_data_path) + + raw.append( + load_data(sub,2, # subject, session + experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016', + data_dir = eegnb_data_path)) + + + # Filter Raw Data + raw.filter(1,30, method='iir') + + #Select Events + events = find_events(raw) + event_id = {'LeftCue': 1, 'RightCue': 2} + epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-1, tmax=2, baseline=(-1, 0), + reject={'eeg':rej_thresh}, preload=True, + verbose=False, picks=[0, 3]) + print('Trials Remaining: ' + str(len(epochs.events)) + '.') + + # Compute morlet wavelet + # Left Cue + tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) + tfr = tfr.apply_baseline((-1,-.5),mode='mean') + power_Ipsi_TP9 = tfr.data[0,:,:] + power_Contra_TP10 = tfr.data[1,:,:] + + # Right Cue + tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) + tfr = tfr.apply_baseline((-1,-.5),mode='mean') + power_Contra_TP9 = tfr.data[0,:,:] + power_Ipsi_TP10 = tfr.data[1,:,:] + + # Compute averages Differences + power_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2; + power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2; + power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra; + + #output data into array + times = 
epochs.times + Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1 + ) + ) + + Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + + Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1)) + + + diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1 + ) + ) + + #save the spectrograms to average over after + ERSP_diff_out.append(power_Avg_Diff) + ERSP_Ipsi_out.append(power_Avg_Ipsi) + ERSP_Contra_out.append(power_Avg_Contra) + + +################################################################################################### +# Combine subjects +# ---------------------------- + +#average spectrograms +GrandAvg_diff = np.nanmean(ERSP_diff_out,0) +GrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0) +GrandAvg_Contra = np.nanmean(ERSP_Contra_out,0) + +#average spectra +GrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0) +GrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0) +GrandAvg_spec_diff = np.nanmean(diff_spectra_out,0) + +#error bars for spectra (standard error) +num_good = len(diff_out) - sum(np.isnan(diff_out)) +GrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good) +GrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good) +GrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good) + +################################################################################################### + +#Plot Spectra error bars 
+fig, ax = plt.subplots(1) +plt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste) +plt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste) +plt.legend(('Ipsi','Contra')) +plt.xlabel('Frequency (Hz)') +plt.ylabel('Power (uV^2)') +plt.hlines(0,3,33) + +################################################################################################### + +#Plot Spectra Diff error bars +fig, ax = plt.subplots(1) +plt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste) +plt.legend('Ipsi-Contra') +plt.xlabel('Frequency (Hz)') +plt.ylabel('Power (uV^2)') +plt.hlines(0,3,33) + +################################################################################################### +# +# Grand Average Ipsi +plot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))]) +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_Ipsi, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Ipsi') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) +# +##e################################################################################################# +# +# Grand Average Contra +# +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_Contra, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Contra') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) +# 
+################################################################################################### +# +# Grand Average Ipsi-Contra Difference +# +plot_max_diff = np.max(np.abs(GrandAvg_diff)) +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_diff, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Difference Ipsi-Contra') +cb = fig.colorbar(im) +cb.set_label('Ipsi-Contra Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +################################################################################################### +# Compute t test +# ---------------------------- + +num_good = len(diff_out) - sum(np.isnan(diff_out)) + +[tstat, pval] = scipy.stats.ttest_ind(diff_out,np.zeros(len(diff_out)),nan_policy='omit') +print('Ipsi Mean: '+ str(np.nanmean(Ipsi_out))) +print('Contra Mean: '+ str(np.nanmean(Contra_out))) +print('Mean Diff: '+ str(np.nanmean(diff_out))) +print('t(' + str(num_good-1) + ') = ' + str(round(tstat,3))) +print('p = ' + str(round(pval,3))) + +################################################################################################### +# Save average powers ipsi and contra +# ---------------------------- + +print(diff_out) +raw_data = {'Ipsi Power': Ipsi_out, + 'Contra Power': Contra_out} +df = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power']) +print(df) +df.to_csv('375CueingEEG.csv') +print('Saved subject averages for each condition to 375CueingEEG.csv file in present directory') + +################################################################################################### +# Save spectra +# ---------------------------- + +df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies) +print(df) 
+df.to_csv('375CueingIpsiSpec.csv') + +df = pd.DataFrame(Contra_spectra_out,columns=frequencies) +df.to_csv('375CueingContraSpec.csv') +print('Saved Spectra to 375CueingContraSpec.csv file in present directory') + + diff --git a/develop/_downloads/1f56a155b1a68a196eeb8aea2e0b7517/02r__cueing_group_analysis.ipynb b/develop/_downloads/1f56a155b1a68a196eeb8aea2e0b7517/02r__cueing_group_analysis.ipynb new file mode 100644 index 00000000..10fa1ea4 --- /dev/null +++ b/develop/_downloads/1f56a155b1a68a196eeb8aea2e0b7517/02r__cueing_group_analysis.ipynb @@ -0,0 +1,169 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Cueing Group Analysis\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Standard pythonic importa\nimport os,sys,glob,numpy as np,pandas as pd\nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\nfrom matplotlib import pyplot as plt\nimport matplotlib.patches as patches\n\n# MNE functions\nfrom mne import Epochs, find_events, concatenate_raws\nfrom mne.time_frequency import tfr_morlet\n\n# EEG-Notebooks functions\nfrom eegnb.datasets import datasets\nfrom eegnb.analysis.analysis_utils import load_data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download the data\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')\ncueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')\n\n# If dataset hasn't been downloaded yet, download it\nif not os.path.isdir(cueing_data_path):\n datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load data into MNE objects\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# MNE is a very powerful Python library for analyzing EEG data. It provides helpful functions for performing key tasks such as filtering EEG data, rejecting artifacts, and grouping EEG data into chunks (epochs).\n\n# The first step after loading dependencies is use MNE to read the data we've collected into an MNE Raw object" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,\n 202, 203, 204, 205, 207, 208, 209, 210, 211, \n 301, 302, 303, 304, 305, 306, 307, 308, 309]\n\ndiff_out = []\nIpsi_out = []\nContra_out = []\nIpsi_spectra_out = []\nContra_spectra_out = []\ndiff_spectra_out = []\nERSP_diff_out = []\nERSP_Ipsi_out = []\nERSP_Contra_out = []\n\nfrequencies = np.linspace(6, 30, 100, endpoint=True)\nwave_cycles = 6\n\n# time frequency window for analysis\nf_low = 7 # Hz\nf_high = 10\nf_diff = f_high-f_low\n \nt_low = 0 # s\nt_high = 1\nt_diff = t_high-t_low\n\nbad_subs= [6, 7, 13, 26]\nreally_bad_subs = [11, 12, 19]\nsub_count = 0 \n \n \n \nfor sub in subs:\n print(sub)\n \n sub_count += 1\n\n \n if (sub_count in really_bad_subs):\n rej_thresh_uV = 90\n elif (sub_count in bad_subs):\n rej_thresh_uV = 90\n else:\n rej_thresh_uV = 90\n\n rej_thresh = rej_thresh_uV*1e-6\n \n \n # Load both sessions\n raw = load_data(sub,1, # subject, session\n experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',\n data_dir = eegnb_data_path)\n \n raw.append(\n load_data(sub,2, # subject, session\n experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',\n data_dir = eegnb_data_path))\n \n\n # Filter Raw Data\n raw.filter(1,30, method='iir')\n\n #Select Events\n 
events = find_events(raw)\n event_id = {'LeftCue': 1, 'RightCue': 2}\n epochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-1, tmax=2, baseline=(-1, 0), \n reject={'eeg':rej_thresh}, preload=True,\n verbose=False, picks=[0, 3])\n print('Trials Remaining: ' + str(len(epochs.events)) + '.')\n\n # Compute morlet wavelet\n\n # Left Cue\n tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\n tfr = tfr.apply_baseline((-1,-.5),mode='mean')\n #tfr.plot(picks=[0], mode='logratio', \n # title='TP9 - Ipsi');\n #tfr.plot(picks=[3], mode='logratio', \n # title='TP10 - Contra');\n power_Ipsi_TP9 = tfr.data[0,:,:]\n power_Contra_TP10 = tfr.data[1,:,:]\n\n # Right Cue\n tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\n tfr = tfr.apply_baseline((-1,-.5),mode='mean')\n #tfr.plot(picks=[0], mode='logratio', \n # title='TP9 - Contra');\n #tfr.plot(picks=[3], mode='logratio', \n # title='TP10 - Ipsi');\n power_Contra_TP9 = tfr.data[0,:,:]\n power_Ipsi_TP10 = tfr.data[1,:,:]\n\n # Plot Differences\n #%matplotlib inline\n times = epochs.times\n power_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2;\n power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;\n power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;\n\n\n #find max to make color range\n plot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), np.max(np.abs(power_Avg_Contra))])\n plot_diff_max = np.max(np.abs(power_Avg_Diff))\n\n \n \n #Ipsi\n fig, ax = plt.subplots(1)\n im = plt.imshow(power_Avg_Ipsi,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\n plt.xlabel('Time (sec)')\n plt.ylabel('Frequency (Hz)')\n plt.title('Power Average Ipsilateral to Cue')\n cb = fig.colorbar(im)\n cb.set_label('Power')\n # Create a Rectangle patch\n rect = 
patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n # Add the patch to the Axes\n ax.add_patch(rect)\n\n #TP10\n fig, ax = plt.subplots(1)\n im = plt.imshow(power_Avg_Contra,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\n plt.xlabel('Time (sec)')\n plt.ylabel('Frequency (Hz)')\n plt.title(str(sub) + ' - Power Average Contra to Cue')\n cb = fig.colorbar(im)\n cb.set_label('Power')\n # Create a Rectangle patch\n rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n # Add the patch to the Axes\n ax.add_patch(rect)\n\n #difference between conditions\n fig, ax = plt.subplots(1)\n im = plt.imshow(power_Avg_Diff,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max)\n plt.xlabel('Time (sec)')\n plt.ylabel('Frequency (Hz)')\n plt.title('Power Difference Ipsi-Contra')\n cb = fig.colorbar(im)\n cb.set_label('Ipsi-Contra Power')\n # Create a Rectangle patch\n rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n # Add the patch to the Axes\n ax.add_patch(rect)\n \n \n \n \n #output data into array\n Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low):\n np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n ) \n Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1\n )\n )\n \n Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low):\n np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n )\n \n Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1))\n \n \n diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low):\n 
np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n )\n diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1\n )\n )\n \n \n ERSP_diff_out.append(power_Avg_Diff)\n ERSP_Ipsi_out.append(power_Avg_Ipsi)\n ERSP_Contra_out.append(power_Avg_Contra)\n\n\n \nprint(np.shape(ERSP_diff_out))\nprint(np.shape(Contra_spectra_out))\n\nprint(diff_out)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Combine subjects\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "GrandAvg_diff = np.nanmean(ERSP_diff_out,0)\nGrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0)\nGrandAvg_Contra = np.nanmean(ERSP_Contra_out,0)\n\nGrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0)\nGrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0)\nGrandAvg_spec_diff = np.nanmean(diff_spectra_out,0)\n\nnum_good = len(diff_out) - sum(np.isnan(diff_out)) \nGrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good)\nGrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good)\nGrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good)\n\n#Spectra error bars\nfig, ax = plt.subplots(1)\nplt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste)\nplt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste)\n\nplt.legend(('Ipsi','Contra'))\nplt.xlabel('Frequency (Hz)')\nplt.ylabel('Power (uV^2)') \nplt.hlines(0,3,33)\n\n#Spectra Diff error bars\nfig, ax = plt.subplots(1)\nplt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste)\n\nplt.legend('Ipsi-Contra')\nplt.xlabel('Frequency (Hz)')\nplt.ylabel('Power (uV^2)') \nplt.hlines(0,3,33)\n\n#Grand Average Ipsi\nplot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))]) \nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_Ipsi,\n extent=[times[0], 
times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Ipsi')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n\n#Grand Average Contra\nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_Contra,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Contra')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n\n#Grand Average Ipsi-Contra Difference\nplot_max_diff = np.max(np.abs(GrandAvg_diff))\nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_diff,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Difference Ipsi-Contra')\ncb = fig.colorbar(im)\ncb.set_label('Ipsi-Contra Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Compute t test\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import scipy\nnum_good = len(diff_out) - sum(np.isnan(diff_out))\n\n[tstat, pval] = 
scipy.stats.ttest_ind(diff_out,np.zeros(len(diff_out)),nan_policy='omit')\nprint('Ipsi Mean: '+ str(np.nanmean(Ipsi_out))) \nprint('Contra Mean: '+ str(np.nanmean(Contra_out))) \nprint('Mean Diff: '+ str(np.nanmean(diff_out))) \nprint('t(' + str(num_good-1) + ') = ' + str(round(tstat,3)))\nprint('p = ' + str(round(pval,3)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save average powers ipsi and contra\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import pandas as pd\nprint(diff_out)\nraw_data = {'Ipsi Power': Ipsi_out, \n 'Contra Power': Contra_out}\ndf = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power'])\ndf.to_csv('375CueingEEG.csv')\nprint('Saved subject averages for each condition to 375CueingEEG.csv file in present directory')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save spectra\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies)\ndf.to_csv('375CueingIpsiSpec.csv')\n\ndf = pd.DataFrame(Contra_spectra_out,columns=frequencies)\ndf.to_csv('375CueingContraSpec.csv')\nprint('Saved Spectra to 375Cueing*Spec.csv file in present directory')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/2cbf7eec12f4415419df2cf85cbe5c5b/visual_p300_python.zip 
b/develop/_downloads/2cbf7eec12f4415419df2cf85cbe5c5b/visual_p300_python.zip new file mode 100644 index 00000000..077e042e Binary files /dev/null and b/develop/_downloads/2cbf7eec12f4415419df2cf85cbe5c5b/visual_p300_python.zip differ diff --git a/develop/_downloads/2f1a411d3414306e436c6540c86910c5/00x__n170_run_experiment.ipynb b/develop/_downloads/2f1a411d3414306e436c6540c86910c5/00x__n170_run_experiment.ipynb new file mode 100644 index 00000000..4a6319eb --- /dev/null +++ b/develop/_downloads/2f1a411d3414306e436c6540c86910c5/00x__n170_run_experiment.ipynb @@ -0,0 +1,68 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# N170 run experiment\n\nThis example demonstrates the initiation of an EEG stream with eeg-expy, and how to run \nan experiment. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Setup\n--------------------- \n\nImports\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from eegnb import generate_save_fn\nfrom eegnb.devices.eeg import EEG\nfrom eegnb.experiments import VisualN170\n\n# Define some variables\nboard_name = \"muse2\" # board name\nexperiment_name = \"visual_n170\" # experiment name\nsubject_id = 0 # test subject id\nsession_nb = 0 # session number\nrecord_duration = 120 # recording duration\n\n# generate save path\nsave_fn = generate_save_fn(board_name, experiment_name, subject_id, session_nb)\n\n# create device object\neeg_device = EEG(device=board_name)\n\n# Experiment type\nexperiment = VisualN170(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run experiment\n--------------------- \n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "experiment.run()\n\n# Saved csv 
location\nprint(\"Recording saved in\", experiment.save_fn)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/2fc210914b93a8aaaaa1a6667585ad74/visual_cueing_jupyter.zip b/develop/_downloads/2fc210914b93a8aaaaa1a6667585ad74/visual_cueing_jupyter.zip new file mode 100644 index 00000000..a8919ce3 Binary files /dev/null and b/develop/_downloads/2fc210914b93a8aaaaa1a6667585ad74/visual_cueing_jupyter.zip differ diff --git a/develop/_downloads/335ce423c80436163e4a75b13e3dba64/00x__ssvep_run_experiment.py b/develop/_downloads/335ce423c80436163e4a75b13e3dba64/00x__ssvep_run_experiment.py new file mode 100644 index 00000000..ce23c176 --- /dev/null +++ b/develop/_downloads/335ce423c80436163e4a75b13e3dba64/00x__ssvep_run_experiment.py @@ -0,0 +1,43 @@ +""" +SSVEP run experiment +=============================== + +This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment. 
+ +""" + +################################################################################################### +# Setup +# --------------------- +# +# Imports +import os +from eegnb import generate_save_fn +from eegnb.devices.eeg import EEG +from eegnb.experiments import VisualSSVEP + +# Define some variables +board_name = "muse2" +experiment = "visual_ssvep" +subject_id = 0 +session_nb = 0 +record_duration = 120 + +################################################################################################### +# Initiate EEG device +# --------------------- +# +# Start EEG device +eeg_device = EEG(device=board_name) + +# Create save file name +save_fn = generate_save_fn(board_name, experiment, subject_id, session_nb) +print(save_fn) + +################################################################################################### +# Run experiment +# --------------------- +# +ssvep = VisualSSVEP(duration=record_duration, eeg=eeg_device, save_fn=save_fn) +ssvep.run() diff --git a/develop/_downloads/482813616f7e52f19737a9e9e4714600/01r__ssvep_viz.ipynb b/develop/_downloads/482813616f7e52f19737a9e9e4714600/01r__ssvep_viz.ipynb new file mode 100644 index 00000000..f34cbda7 --- /dev/null +++ b/develop/_downloads/482813616f7e52f19737a9e9e4714600/01r__ssvep_viz.ipynb @@ -0,0 +1,133 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# SSVEP Visualization\n\nThis example demonstrates loading, organizing, and visualizing data from the steady-state visual evoked potentials (SSVEP) experiment. \n\nThe data used is the first subject and first session of the one of the eeg-expy ssvep example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording. \n\nWe first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present \nin the specified data directory, they will be quickly downloaded from the cloud. 
\n\nAfter loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. \n\nThe final figures show the visual frequencies appearing in the measured power spectrum. \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport os, numpy as np, pandas as pd\nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\nfrom matplotlib import pyplot as plt\n\n# MNE functions\nfrom mne import Epochs,find_events\nfrom mne.time_frequency import tfr_morlet\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data,plot_conditions\nfrom eegnb.datasets import fetch_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load Data\n ---------------------\n\n We will use the eeg-expy SSVEP example dataset\n\n Note that if you are running this locally, the following cell will download\n the example dataset, if you do not already have it.\n\n##################################################################################################\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \nssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(ssvep_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples'); \n\n\nsubject = 1\nsession = 1\nraw = load_data(subject, session, \n experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016',\n data_dir = eegnb_data_path,\n replace_ch_names={'Right AUX': 'POz'})\nraw.set_channel_types({'POz': 'eeg'})" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 
Visualize the power spectrum\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.plot_psd()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus.\n# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks\n\nevents = find_events(raw)\nevent_id = {'30 Hz': 1, '20 Hz': 2}\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-0.5, tmax=4, baseline=None, preload=True,\n verbose=False, picks=[0, 1, 2, 3, 4])\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Stimuli-Specific PSD\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Next, we can compare the PSD of epochs specifically during 20hz and 30hz stimulus presentation\n\nf, axs = plt.subplots(2, 1, figsize=(10, 10))\n\nwelch_params=dict(method='welch',\n n_fft=1028,\n n_per_seg=256 * 3,\n picks='all')\n\npsd1, freq1 = epochs['30 Hz'].compute_psd(**welch_params).get_data(return_freqs=True)\npsd2, freq2 = epochs['20 Hz'].compute_psd(**welch_params).get_data(return_freqs=True)\npsd1 = 10 * np.log10(psd1)\npsd2 = 10 * np.log10(psd2)\n\npsd1_mean = psd1.mean(0)\npsd1_std = psd1.mean(0)\n\npsd2_mean = psd2.mean(0)\npsd2_std = psd2.mean(0)\n\naxs[0].plot(freq1, psd1_mean[[0, 3], :].mean(0), color='b', label='30 Hz')\naxs[0].plot(freq2, psd2_mean[[0, 3], :].mean(0), color='r', label='20 Hz')\n\naxs[1].plot(freq1, psd1_mean[4, :], color='b', label='30 
Hz')\naxs[1].plot(freq2, psd2_mean[4, :], color='r', label='20 Hz')\n\naxs[0].set_title('TP9 and TP10')\naxs[1].set_title('POz')\n\naxs[0].set_ylabel('Power Spectral Density (dB)')\naxs[1].set_ylabel('Power Spectral Density (dB)')\n\naxs[0].set_xlim((2, 50))\naxs[1].set_xlim((2, 50))\n\naxs[1].set_xlabel('Frequency (Hz)')\n\naxs[0].legend()\naxs[1].legend()\n\nplt.show();\n\n# With this visualization we can clearly see distinct peaks at 30hz and 20hz in the PSD, corresponding to the frequency of the visual stimulation. The peaks are much larger at the POz electrode, but still visible at TP9 and TP10" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Spectrogram\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# We can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time\n\nfrequencies = np.logspace(1, 1.75, 60)\ntfr, itc = tfr_morlet(epochs['30 Hz'], freqs=frequencies,picks='all',\n n_cycles=15, return_itc=True)\ntfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio', \n title='POz - 30 Hz stim');\n\ntfr, itc = tfr_morlet(epochs['20 Hz'], freqs=frequencies,picks='all',\n n_cycles=15, return_itc=True)\ntfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio', \n title='POz - 20 Hz stim');\n\n# Set Layout engine to tight to fix error with using colorbar layout error\nplt.figure().set_layout_engine('tight');\nplt.tight_layout()\n\n# Once again we can see clear SSVEPs at 30hz and 20hz" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + 
"nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/4c0d23639fbd212d64cd06010552da22/visual_n170_python.zip b/develop/_downloads/4c0d23639fbd212d64cd06010552da22/visual_n170_python.zip new file mode 100644 index 00000000..94b68463 Binary files /dev/null and b/develop/_downloads/4c0d23639fbd212d64cd06010552da22/visual_n170_python.zip differ diff --git a/develop/_downloads/4cd288a0df5274e2be1a468e1967a571/04r__cueing_group_analysis_winter2019.ipynb b/develop/_downloads/4cd288a0df5274e2be1a468e1967a571/04r__cueing_group_analysis_winter2019.ipynb new file mode 100644 index 00000000..5fc9e08b --- /dev/null +++ b/develop/_downloads/4cd288a0df5274e2be1a468e1967a571/04r__cueing_group_analysis_winter2019.ipynb @@ -0,0 +1,216 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Cueing Group Analysis Winter 2019\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Standard Pythonic imports\nimport os,sys,glob,numpy as np, pandas as pd\nimport scipy\nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\nfrom matplotlib import pyplot as plt\nimport matplotlib.patches as patches\n\n# MNE functions\nfrom mne import Epochs, find_events, concatenate_raws\nfrom mne.time_frequency import tfr_morlet\n\n# EEG-Noteooks functions\nfrom eegnb.analysis.analysis_utils import load_data\nfrom eegnb.datasets import fetch_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load the data\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')\ncueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 
'kylemathlab_dev')\n\n# If dataset hasn't been downloaded yet, download it\nif not os.path.isdir(cueing_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Put the data into MNE Epochs\n\nFall 2018\nsubs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,\n 202, 203, 204, 205, 207, 208, 209, 210, 211, \n 301, 302, 303, 304, 305, 306, 307, 308, 309]\n\nWinter 2019\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,\n 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,\n 1301, 1302, 1313, \n 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]\n#\n# Both\n# subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,\n# 202, 203, 204, 205, 207, 208, 209, 210, 211, \n# 301, 302, 303, 304, 305, 306, 307, 308, 309,\n# 1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,\n# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,\n# 1301, 1302, 1313, \n# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]\n#\n#\n# placeholders to add to for each subject\ndiff_out = []\nIpsi_out = []\nContra_out = []\nIpsi_spectra_out = []\nContra_spectra_out = []\ndiff_spectra_out = []\nERSP_diff_out = []\nERSP_Ipsi_out = []\nERSP_Contra_out = []\n\nfrequencies = np.linspace(6, 30, 100, endpoint=True)\nwave_cycles = 6\n\n# time frequency window for analysis\nf_low = 7 # Hz\nf_high = 10\nf_diff = f_high-f_low\n \nt_low = 0 # s\nt_high = 1\nt_diff = t_high-t_low\n\nbad_subs= [6, 7, 13, 26]\nreally_bad_subs = [11, 12, 19]\nsub_count = 0 \n \n \n \nfor sub in subs:\n print(sub)\n \n sub_count += 1\n\n \n if (sub_count in really_bad_subs):\n rej_thresh_uV = 90\n elif (sub_count in bad_subs):\n rej_thresh_uV = 90\n else:\n rej_thresh_uV = 90\n\n 
rej_thresh = rej_thresh_uV*1e-6\n \n \n # Load both sessions\n raw = load_data(sub,1, # subject, session\n experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',\n data_dir = eegnb_data_path)\n \n raw.append(\n load_data(sub,2, # subject, session\n experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',\n data_dir = eegnb_data_path))\n \n\n # Filter Raw Data\n raw.filter(1,30, method='iir')\n\n #Select Events\n events = find_events(raw)\n event_id = {'LeftCue': 1, 'RightCue': 2}\n epochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-1, tmax=2, baseline=(-1, 0), \n reject={'eeg':rej_thresh}, preload=True,\n verbose=False, picks=[0, 3])\n print('Trials Remaining: ' + str(len(epochs.events)) + '.')\n\n # Compute morlet wavelet\n # Left Cue\n tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\n tfr = tfr.apply_baseline((-1,-.5),mode='mean')\n power_Ipsi_TP9 = tfr.data[0,:,:]\n power_Contra_TP10 = tfr.data[1,:,:]\n\n # Right Cue\n tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\n tfr = tfr.apply_baseline((-1,-.5),mode='mean')\n power_Contra_TP9 = tfr.data[0,:,:]\n power_Ipsi_TP10 = tfr.data[1,:,:]\n\n # Compute averages Differences\n power_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2;\n power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;\n power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;\n \n #output data into array\n times = epochs.times\n Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low):\n np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n ) \n Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1\n )\n )\n \n Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low):\n np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n )\n \n 
Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1))\n \n \n diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low):\n np.argmax(frequencies>f_high)-1,\n np.argmax(times>t_low):np.argmax(times>t_high)-1 ]\n )\n )\n diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low):\n np.argmax(times>t_high)-1 ],1\n )\n )\n \n #save the spectrograms to average over after\n ERSP_diff_out.append(power_Avg_Diff)\n ERSP_Ipsi_out.append(power_Avg_Ipsi)\n ERSP_Contra_out.append(power_Avg_Contra)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Combine subjects\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#average spectrograms\nGrandAvg_diff = np.nanmean(ERSP_diff_out,0)\nGrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0)\nGrandAvg_Contra = np.nanmean(ERSP_Contra_out,0)\n\n#average spectra\nGrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0)\nGrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0)\nGrandAvg_spec_diff = np.nanmean(diff_spectra_out,0)\n\n#error bars for spectra (standard error)\nnum_good = len(diff_out) - sum(np.isnan(diff_out)) \nGrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good)\nGrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good)\nGrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#Plot Spectra error bars\nfig, ax = plt.subplots(1)\nplt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste)\nplt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste)\nplt.legend(('Ipsi','Contra'))\nplt.xlabel('Frequency (Hz)')\nplt.ylabel('Power (uV^2)') \nplt.hlines(0,3,33)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#Plot Spectra Diff error bars\nfig, ax = plt.subplots(1)\nplt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste)\nplt.legend('Ipsi-Contra')\nplt.xlabel('Frequency (Hz)')\nplt.ylabel('Power (uV^2)') \nplt.hlines(0,3,33)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Grand Average Ipsi\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "plot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))]) \nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_Ipsi,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Ipsi')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n#\n##e#################################################################################################\n#\n# Grand Average Contra\n#\nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_Contra,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Contra')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n#" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Grand Average Ipsi-Contra Difference\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + 
"collapsed": false + }, + "outputs": [], + "source": [ + "plot_max_diff = np.max(np.abs(GrandAvg_diff))\nfig, ax = plt.subplots(1)\nim = plt.imshow(GrandAvg_diff,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Difference Ipsi-Contra')\ncb = fig.colorbar(im)\ncb.set_label('Ipsi-Contra Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Compute t test\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "num_good = len(diff_out) - sum(np.isnan(diff_out))\n\n[tstat, pval] = scipy.stats.ttest_ind(diff_out,np.zeros(len(diff_out)),nan_policy='omit')\nprint('Ipsi Mean: '+ str(np.nanmean(Ipsi_out))) \nprint('Contra Mean: '+ str(np.nanmean(Contra_out))) \nprint('Mean Diff: '+ str(np.nanmean(diff_out))) \nprint('t(' + str(num_good-1) + ') = ' + str(round(tstat,3)))\nprint('p = ' + str(round(pval,3)))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save average powers ipsi and contra\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "print(diff_out)\nraw_data = {'Ipsi Power': Ipsi_out, \n 'Contra Power': Contra_out}\ndf = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power'])\nprint(df)\ndf.to_csv('375CueingEEG.csv')\nprint('Saved subject averages for each condition to 375CueingEEG.csv file in present directory')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save spectra\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, 
+ "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies)\nprint(df)\ndf.to_csv('375CueingIpsiSpec.csv')\n\ndf = pd.DataFrame(Contra_spectra_out,columns=frequencies)\ndf.to_csv('375CueingContraSpec.csv')\nprint('Saved Spectra to 375CueingContraSpec.csv file in present directory')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/67001aab4aa80bdc4a44406add4d085e/visual_cueing_python.zip b/develop/_downloads/67001aab4aa80bdc4a44406add4d085e/visual_cueing_python.zip new file mode 100644 index 00000000..46385dd4 Binary files /dev/null and b/develop/_downloads/67001aab4aa80bdc4a44406add4d085e/visual_cueing_python.zip differ diff --git a/develop/_downloads/679acb9da15a2bd0f1cf78d5dea6995b/00x__n170_run_experiment.py b/develop/_downloads/679acb9da15a2bd0f1cf78d5dea6995b/00x__n170_run_experiment.py new file mode 100644 index 00000000..b7188696 --- /dev/null +++ b/develop/_downloads/679acb9da15a2bd0f1cf78d5dea6995b/00x__n170_run_experiment.py @@ -0,0 +1,42 @@ +""" +N170 run experiment +=============================== + +This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment. 
+ +""" + +################################################################################################### +# Setup +# --------------------- +# +# Imports +from eegnb import generate_save_fn +from eegnb.devices.eeg import EEG +from eegnb.experiments import VisualN170 + +# Define some variables +board_name = "muse2" # board name +experiment_name = "visual_n170" # experiment name +subject_id = 0 # test subject id +session_nb = 0 # session number +record_duration = 120 # recording duration + +# generate save path +save_fn = generate_save_fn(board_name, experiment_name, subject_id, session_nb) + +# create device object +eeg_device = EEG(device=board_name) + +# Experiment type +experiment = VisualN170(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=False) + +################################################################################################### +# Run experiment +# --------------------- +# +experiment.run() + +# Saved csv location +print("Recording saved in", experiment.save_fn) diff --git a/develop/_downloads/6c7787744c46f6694529791768174f35/02r__ssvep_decoding.py b/develop/_downloads/6c7787744c46f6694529791768174f35/02r__ssvep_decoding.py new file mode 100644 index 00000000..52a7d11d --- /dev/null +++ b/develop/_downloads/6c7787744c46f6694529791768174f35/02r__ssvep_decoding.py @@ -0,0 +1,157 @@ +""" +SSVEP Decoding +=============================== + +This notebook runs only the data analysis part of experiment. + +Look at the notes to see how this can be run on the web with binder or google collab. + +All of the additional notes are removed; only the code cells are kept. 
+ +""" + +################################################################################################### +# Setup +# --------------------- + +# Some standard pythonic imports +import warnings +warnings.filterwarnings('ignore') +import os,numpy as np,pandas as pd +from collections import OrderedDict +import seaborn as sns +from matplotlib import pyplot as plt + +# MNE functions +from mne import Epochs,find_events +from mne.decoding import Vectorizer + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data +from eegnb.datasets import fetch_dataset + +# Scikit-learn and Pyriemann ML functionalities +from sklearn.pipeline import make_pipeline +from sklearn.linear_model import LogisticRegression +from sklearn.preprocessing import StandardScaler +from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA +from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit +from pyriemann.estimation import Covariances, ERPCovariances, XdawnCovariances +from pyriemann.spatialfilters import CSP +from pyriemann.tangentspace import TangentSpace +from pyriemann.classification import MDM + +################################################################################################### +# Load Data +# --------------------- +# +# ( See the ssvep `load_and_visualize` example for further description of this) +# + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +ssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(ssvep_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples') + +subject = 1 +session = 1 +raw = load_data(subject, session, + experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016', + data_dir = eegnb_data_path, + replace_ch_names={'Right AUX': 'POz'}) + 
+################################################################################################### +# Epoching +# ---------------------------- + +# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus. +# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks + +events = find_events(raw) +event_id = {'30 Hz': 1, '20 Hz': 2} +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.5, tmax=4, baseline=None, preload=True, + verbose=False, picks=[0, 1, 2, 3, 4]) +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +##################################################################################################### +# Decoding +# ---------- + +# We can use a filter bank approach on the original 4 Muse electrodes (to see how the headband alone without external electrodes could be used to classify SSVEP): + +# - Apply bandpass filters around both stimulation frequencies +# - Concatenate bandpass-filtered channels +# - Extract epochs (from 1 to 3 s after stimulus onset, to avoid classifying the ERP) +# - Apply common classification pipelines + +# Bandpass filter the raw data +muse_raw = raw.drop_channels(['POz']) +raw_filt_30Hz = muse_raw.copy().filter(25, 35, method='iir') +raw_filt_20Hz = muse_raw.copy().filter(15, 25, method='iir') +raw_filt_30Hz.rename_channels(lambda x: x + '_30Hz') +raw_filt_20Hz.rename_channels(lambda x: x + '_20Hz') + +# Concatenate with the bandpass filtered channels +raw_all = raw_filt_30Hz.add_channels([raw_filt_20Hz], + force_update_info=True) + +# Extract epochs +events = find_events(raw_all) +event_id = {'30 Hz': 1, '20 Hz': 2} + +epochs_all = Epochs(raw_all, events=events, event_id=event_id, + tmin=1, tmax=3, baseline=None, + reject={'eeg': 100e-6}, preload=True, verbose=False,) + +epochs_all.pick_types(eeg=True) +X = epochs_all.get_data() * 1e6 +times = epochs.times +y = 
epochs_all.events[:, -1] + +################################################################################################### +# Decoding +# ---------------------------- + +# Next, we will use 4 different machine learning pipelines to classify the SSVEP based on the data we collected. The + +# - CSP + RegLDA : Common Spatial Patterns + Regularized Linear Discriminat Analysis. This is a very common EEG analysis pipeline. +# - Cov + TS : Covariance + Tangent space mapping. One of the most reliable Riemannian geometry-based pipelines. +# - Cov + MDM: Covariance + MDM. A very simple, yet effective (for low channel count), Riemannian geometry classifier. +# - CSP + Cov + TS: Common Spatial Patterns + Covariance + Tangent spacem mapping. Riemannian pipeline with the standard CSP procedure beforehand + +# Evaluation is done through cross-validation, with area-under-the-curve (AUC) as metric (AUC is probably the best metric for binary and unbalanced classification problem) + +# Note: because we're doing machine learning here, the following cell may take a while to complete + +clfs = OrderedDict() +clfs['CSP + RegLDA'] = make_pipeline(Covariances(), CSP(4), LDA(shrinkage='auto', solver='eigen')) +clfs['Cov + TS'] = make_pipeline(Covariances(), TangentSpace(), LogisticRegression()) +clfs['Cov + MDM'] = make_pipeline(Covariances(), MDM()) +clfs['CSP + Cov + TS'] = make_pipeline(Covariances(), CSP(4, log=False), TangentSpace(), LogisticRegression()) + +# define cross validation +cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25, + random_state=42) + +# run cross validation for each pipeline +auc = [] +methods = [] +for m in clfs: + print(m) + try: + res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc',cv=cv, n_jobs=-1) + auc.extend(res) + methods.extend([m]*len(res)) + except: + pass + +results = pd.DataFrame(data=auc, columns=['AUC']) +results['Method'] = methods + +fig = plt.figure(figsize=[8,4]) +sns.barplot(data=results, x='AUC', y='Method') +plt.xlim(0.4, 1) 
+sns.despine() diff --git a/develop/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip b/develop/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip new file mode 100644 index 00000000..fc311f1b Binary files /dev/null and b/develop/_downloads/6f1e7a639e0699d6164445b55e6c116d/auto_examples_jupyter.zip differ diff --git a/develop/_downloads/872178a6cf309a4de1aa7d759d171a7b/02r__n170_decoding.ipynb b/develop/_downloads/872178a6cf309a4de1aa7d759d171a7b/02r__n170_decoding.ipynb new file mode 100644 index 00000000..f832d2cc --- /dev/null +++ b/develop/_downloads/872178a6cf309a4de1aa7d759d171a7b/02r__n170_decoding.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# N170 Decoding\n\nThis example runs a set of machine learning algorithms on the N170 faces/houses \ndataset, and compares them in terms of classification performance. \n\nThe data used is exactly the same as in the N170 `load_and_visualize` example. 
\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport warnings\nwarnings.filterwarnings('ignore')\nimport os,numpy as np,pandas as pd\nfrom collections import OrderedDict\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\n\n# MNE functions\nfrom mne import Epochs,find_events\nfrom mne.decoding import Vectorizer\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data\nfrom eegnb.datasets import fetch_dataset\n\n# Scikit-learn and Pyriemann ML functionalities\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\nfrom sklearn.model_selection import cross_val_score, StratifiedShuffleSplit\nfrom pyriemann.estimation import ERPCovariances, XdawnCovariances\nfrom pyriemann.tangentspace import TangentSpace\nfrom pyriemann.classification import MDM" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data\n\n( See the n170 `load_and_visualize` example for further description of this)\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \nn170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(n170_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples') \n\nsubject = 1\nsession = 1\nraw = load_data(subject,session,\n experiment='visual-N170', site='eegnb_examples', device_name='muse2016',\n data_dir = 
eegnb_data_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filteriing\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Create an array containing the timestamps and type of each stimulus (i.e. face or house)\nevents = find_events(raw)\nevent_id = {'House': 1, 'Face': 2}\n\n# Create an MNE Epochs object representing all the epochs around stimulus presentation\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-0.1, tmax=0.8, baseline=None,\n reject={'eeg': 75e-6}, preload=True,\n verbose=False, picks=[0,1,2,3])\n\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\nepochs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run classification\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "clfs = OrderedDict()\nclfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression())\nclfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen'))\nclfs['ERPCov + TS'] = make_pipeline(ERPCovariances(estimator='oas'), TangentSpace(), LogisticRegression())\nclfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(estimator='oas'), MDM())\nclfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression())\nclfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM())\n\n# format data\nepochs.pick_types(eeg=True)\nX = epochs.get_data() * 1e6\ntimes = epochs.times\ny = epochs.events[:, -1]\n\n# define cross validation \ncv = 
StratifiedShuffleSplit(n_splits=20, test_size=0.25, \n random_state=42)\n\n# run cross validation for each pipeline\nauc = []\nmethods = []\nfor m in clfs:\n print(m)\n try:\n res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc', \n cv=cv, n_jobs=-1)\n auc.extend(res)\n methods.extend([m]*len(res))\n except:\n pass" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Plot Decoding Results\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "results = pd.DataFrame(data=auc, columns=['AUC'])\nresults['Method'] = methods\n\nfig = plt.figure(figsize=[8,4])\nsns.barplot(data=results, x='AUC', y='Method')\nplt.xlim(0.4, 0.9)\nsns.despine()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/89f5ff349033ec3c3d48bcfb2c3c3de2/01r__ssvep_viz.py b/develop/_downloads/89f5ff349033ec3c3d48bcfb2c3c3de2/01r__ssvep_viz.py new file mode 100644 index 00000000..55220e43 --- /dev/null +++ b/develop/_downloads/89f5ff349033ec3c3d48bcfb2c3c3de2/01r__ssvep_viz.py @@ -0,0 +1,153 @@ +""" +SSVEP Visualization +=============================== + +This example demonstrates loading, organizing, and visualizing data from the steady-state visual evoked potentials (SSVEP) experiment. + +The data used is the first subject and first session of the one of the eeg-expy ssvep example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording. 
+ +We first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud. + +After loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. + +The final figures show the visual frequencies appearing in the measured power spectrum. + +""" + +################################################################################################### + +# Some standard pythonic imports +import os, numpy as np, pandas as pd +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') +from matplotlib import pyplot as plt + +# MNE functions +from mne import Epochs,find_events +from mne.time_frequency import tfr_morlet + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data,plot_conditions +from eegnb.datasets import fetch_dataset + +# sphinx_gallery_thumbnail_number = 3 + +################################################################################################### +# Load Data +# --------------------- +# +# We will use the eeg-expy SSVEP example dataset +# +# Note that if you are running this locally, the following cell will download +# the example dataset, if you do not already have it. 
+# +################################################################################################### + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +ssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(ssvep_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples'); + + +subject = 1 +session = 1 +raw = load_data(subject, session, + experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016', + data_dir = eegnb_data_path, + replace_ch_names={'Right AUX': 'POz'}) +raw.set_channel_types({'POz': 'eeg'}) + +################################################################################################### +# Visualize the power spectrum +# ---------------------------- + +raw.plot_psd() + +################################################################################################### +# Epoching +# ---------------------------- + +# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus. 
+# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks + +events = find_events(raw) +event_id = {'30 Hz': 1, '20 Hz': 2} +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.5, tmax=4, baseline=None, preload=True, + verbose=False, picks=[0, 1, 2, 3, 4]) +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +#################################################################################################### +# Stimuli-Specific PSD +# ---------------------- + +# Next, we can compare the PSD of epochs specifically during 20hz and 30hz stimulus presentation + +f, axs = plt.subplots(2, 1, figsize=(10, 10)) + +welch_params=dict(method='welch', + n_fft=1028, + n_per_seg=256 * 3, + picks='all') + +psd1, freq1 = epochs['30 Hz'].compute_psd(**welch_params).get_data(return_freqs=True) +psd2, freq2 = epochs['20 Hz'].compute_psd(**welch_params).get_data(return_freqs=True) +psd1 = 10 * np.log10(psd1) +psd2 = 10 * np.log10(psd2) + +psd1_mean = psd1.mean(0) +psd1_std = psd1.mean(0) + +psd2_mean = psd2.mean(0) +psd2_std = psd2.mean(0) + +axs[0].plot(freq1, psd1_mean[[0, 3], :].mean(0), color='b', label='30 Hz') +axs[0].plot(freq2, psd2_mean[[0, 3], :].mean(0), color='r', label='20 Hz') + +axs[1].plot(freq1, psd1_mean[4, :], color='b', label='30 Hz') +axs[1].plot(freq2, psd2_mean[4, :], color='r', label='20 Hz') + +axs[0].set_title('TP9 and TP10') +axs[1].set_title('POz') + +axs[0].set_ylabel('Power Spectral Density (dB)') +axs[1].set_ylabel('Power Spectral Density (dB)') + +axs[0].set_xlim((2, 50)) +axs[1].set_xlim((2, 50)) + +axs[1].set_xlabel('Frequency (Hz)') + +axs[0].legend() +axs[1].legend() + +plt.show(); + +# With this visualization we can clearly see distinct peaks at 30hz and 20hz in the PSD, corresponding to the frequency of the visual stimulation. 
The peaks are much larger at the POz electrode, but still visible at TP9 and TP10 + +#################################################################################################### +# Spectrogram +# ----------- + +# We can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time + +frequencies = np.logspace(1, 1.75, 60) +tfr, itc = tfr_morlet(epochs['30 Hz'], freqs=frequencies,picks='all', + n_cycles=15, return_itc=True) +tfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio', + title='POz - 30 Hz stim'); + +tfr, itc = tfr_morlet(epochs['20 Hz'], freqs=frequencies,picks='all', + n_cycles=15, return_itc=True) +tfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio', + title='POz - 20 Hz stim'); + +# Set Layout engine to tight to fix error with using colorbar layout error +plt.figure().set_layout_engine('tight'); +plt.tight_layout() + +# Once again we can see clear SSVEPs at 30hz and 20hz diff --git a/develop/_downloads/8d83acdbf2986ad81d4d9e035ff593ef/01r__n170_viz.ipynb b/develop/_downloads/8d83acdbf2986ad81d4d9e035ff593ef/01r__n170_viz.ipynb new file mode 100644 index 00000000..2da48717 --- /dev/null +++ b/develop/_downloads/8d83acdbf2986ad81d4d9e035ff593ef/01r__n170_viz.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# N170 Load and Visualize Data\n\nThis example demonstrates loading, organizing, and visualizing ERP response data from the visual N170 experiment. \n\nImages of faces and houses are shown in a rapid serial visual presentation (RSVP) stream.\n\nThe data used is the first subject and first session of the one of the eeg-expy N170 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). \nThis session consists of six two-minute blocks of continuous recording. \n\nWe first use the `fetch_datasets` to obtain a list of filenames. 
If these files are not already present \nin the specified data directory, they will be quickly downloaded from the cloud. \n\nAfter loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. \n\nThe final figure plotted at the end shows the N170 response ERP waveform. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport os\nfrom matplotlib import pyplot as plt \nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\n\n# MNE functions\nfrom mne import Epochs,find_events\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data,plot_conditions\nfrom eegnb.datasets import fetch_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data\n\nWe will use the eeg-expy N170 example dataset\n\nNote that if you are running this locally, the following cell will download\nthe example dataset, if you do not already have it.\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \nn170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(n170_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples');\n\nsubject = 1\nsession = 1\nraw = load_data(subject,session,\n experiment='visual-N170', site='eegnb_examples', device_name='muse2016_bfn',\n data_dir = eegnb_data_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize the power spectrum\n\n" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.plot_psd()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filtering\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir')\nraw.plot_psd(fmin=1, fmax=30);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Create an array containing the timestamps and type of each stimulus (i.e. face or house)\nevents = find_events(raw)\nevent_id = {'House': 1, 'Face': 2}\n\n# Create an MNE Epochs object representing all the epochs around stimulus presentation\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-0.1, tmax=0.6, baseline=None,\n reject={'eeg': 5e-5}, preload=True, \n verbose=False, picks=[0,1,2,3])\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\nepochs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoch average\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "conditions = OrderedDict()\n#conditions['House'] = [1]\n#conditions['Face'] = [2]\nconditions['House'] = ['House']\nconditions['Face'] = ['Face']\ndiffwav = ('Face', 'House')\n\nfig, ax = plot_conditions(epochs, conditions=conditions, \n ci=97.5, n_boot=1000, title='',\n diff_waveform=diffwav,\n channel_order=[1,0,2,3]) \n# reordering of epochs.ch_names according to [[0,2],[1,3]] of subplot axes\n\n# Manually adjust the ylims\nfor i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6])\nfor i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6])\nplt.tight_layout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + 
"language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/a74331ba25f1e4b93e2ecf2b3efd4a13/02r__ssvep_decoding.ipynb b/develop/_downloads/a74331ba25f1e4b93e2ecf2b3efd4a13/02r__ssvep_decoding.ipynb new file mode 100644 index 00000000..56f7168f --- /dev/null +++ b/develop/_downloads/a74331ba25f1e4b93e2ecf2b3efd4a13/02r__ssvep_decoding.ipynb @@ -0,0 +1,122 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# SSVEP Decoding\n\nThis notebook runs only the data analysis part of experiment.\n\nLook at the notes to see how this can be run on the web with binder or google collab.\n\nAll of the additional notes are removed; only the code cells are kept.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport warnings\nwarnings.filterwarnings('ignore')\nimport os,numpy as np,pandas as pd\nfrom collections import OrderedDict\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\n\n# MNE functions\nfrom mne import Epochs,find_events\nfrom mne.decoding import Vectorizer\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data\nfrom eegnb.datasets import fetch_dataset\n\n# Scikit-learn and Pyriemann ML functionalities\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\nfrom 
sklearn.model_selection import cross_val_score, StratifiedShuffleSplit\nfrom pyriemann.estimation import Covariances, ERPCovariances, XdawnCovariances\nfrom pyriemann.spatialfilters import CSP\nfrom pyriemann.tangentspace import TangentSpace\nfrom pyriemann.classification import MDM" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data\n\n( See the ssvep `load_and_visualize` example for further description of this)\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \nssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(ssvep_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples') \n\nsubject = 1\nsession = 1\nraw = load_data(subject, session, \n experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016',\n data_dir = eegnb_data_path,\n replace_ch_names={'Right AUX': 'POz'})" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus.\n# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks\n\nevents = find_events(raw)\nevent_id = {'30 Hz': 1, '20 Hz': 2}\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-0.5, tmax=4, baseline=None, preload=True,\n verbose=False, picks=[0, 1, 2, 3, 4])\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + 
"source": [ + "## Decoding\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# We can use a filter bank approach on the original 4 Muse electrodes (to see how the headband alone without external electrodes could be used to classify SSVEP):\n\n# - Apply bandpass filters around both stimulation frequencies\n# - Concatenate bandpass-filtered channels\n# - Extract epochs (from 1 to 3 s after stimulus onset, to avoid classifying the ERP)\n# - Apply common classification pipelines\n\n# Bandpass filter the raw data\nmuse_raw = raw.drop_channels(['POz'])\nraw_filt_30Hz = muse_raw.copy().filter(25, 35, method='iir')\nraw_filt_20Hz = muse_raw.copy().filter(15, 25, method='iir')\nraw_filt_30Hz.rename_channels(lambda x: x + '_30Hz')\nraw_filt_20Hz.rename_channels(lambda x: x + '_20Hz')\n\n# Concatenate with the bandpass filtered channels\nraw_all = raw_filt_30Hz.add_channels([raw_filt_20Hz], \n force_update_info=True)\n\n# Extract epochs\nevents = find_events(raw_all)\nevent_id = {'30 Hz': 1, '20 Hz': 2}\n\nepochs_all = Epochs(raw_all, events=events, event_id=event_id, \n tmin=1, tmax=3, baseline=None, \n reject={'eeg': 100e-6}, preload=True, verbose=False,)\n\nepochs_all.pick_types(eeg=True)\nX = epochs_all.get_data() * 1e6\ntimes = epochs.times\ny = epochs_all.events[:, -1]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Decoding\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Next, we will use 4 different machine learning pipelines to classify the SSVEP based on the data we collected. The\n\n# - CSP + RegLDA : Common Spatial Patterns + Regularized Linear Discriminat Analysis. This is a very common EEG analysis pipeline.\n# - Cov + TS : Covariance + Tangent space mapping. 
One of the most reliable Riemannian geometry-based pipelines.\n# - Cov + MDM: Covariance + MDM. A very simple, yet effective (for low channel count), Riemannian geometry classifier.\n# - CSP + Cov + TS: Common Spatial Patterns + Covariance + Tangent spacem mapping. Riemannian pipeline with the standard CSP procedure beforehand\n\n# Evaluation is done through cross-validation, with area-under-the-curve (AUC) as metric (AUC is probably the best metric for binary and unbalanced classification problem)\n\n# Note: because we're doing machine learning here, the following cell may take a while to complete\n\nclfs = OrderedDict()\nclfs['CSP + RegLDA'] = make_pipeline(Covariances(), CSP(4), LDA(shrinkage='auto', solver='eigen'))\nclfs['Cov + TS'] = make_pipeline(Covariances(), TangentSpace(), LogisticRegression())\nclfs['Cov + MDM'] = make_pipeline(Covariances(), MDM())\nclfs['CSP + Cov + TS'] = make_pipeline(Covariances(), CSP(4, log=False), TangentSpace(), LogisticRegression())\n\n# define cross validation \ncv = StratifiedShuffleSplit(n_splits=20, test_size=0.25, \n random_state=42)\n\n# run cross validation for each pipeline\nauc = []\nmethods = []\nfor m in clfs:\n print(m) \n try: \n res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc',cv=cv, n_jobs=-1)\n auc.extend(res)\n methods.extend([m]*len(res))\n except:\n pass\n \nresults = pd.DataFrame(data=auc, columns=['AUC'])\nresults['Method'] = methods\n\nfig = plt.figure(figsize=[8,4])\nsns.barplot(data=results, x='AUC', y='Method')\nplt.xlim(0.4, 1)\nsns.despine()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file 
diff --git a/develop/_downloads/b1193b749e4caeff4dbfdf0c20ae6801/visual_ssvep_jupyter.zip b/develop/_downloads/b1193b749e4caeff4dbfdf0c20ae6801/visual_ssvep_jupyter.zip new file mode 100644 index 00000000..ec3e6c65 Binary files /dev/null and b/develop/_downloads/b1193b749e4caeff4dbfdf0c20ae6801/visual_ssvep_jupyter.zip differ diff --git a/develop/_downloads/b1c28450479cda89a20f021d24809e55/visual_n170_jupyter.zip b/develop/_downloads/b1c28450479cda89a20f021d24809e55/visual_n170_jupyter.zip new file mode 100644 index 00000000..f7c58e1d Binary files /dev/null and b/develop/_downloads/b1c28450479cda89a20f021d24809e55/visual_n170_jupyter.zip differ diff --git a/develop/_downloads/b2ef39fb5bc5abfa985af6266c497c15/01r__p300_viz.ipynb b/develop/_downloads/b2ef39fb5bc5abfa985af6266c497c15/01r__p300_viz.ipynb new file mode 100644 index 00000000..25a31ea4 --- /dev/null +++ b/develop/_downloads/b2ef39fb5bc5abfa985af6266c497c15/01r__p300_viz.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# P300 Load and Visualize Data\n\nThis example demonstrates loading, organizing, and visualizing ERP response data from the visual P300 experiment. The experiment uses a visual oddball paradigm. Images of cats and dogs are shwn in a rapid serial visual presentation (RSVP) stream, with cats and dogs categorized respectively as 'targets' or 'non-targets', according to which has high or low probability of occurring, respectively. \n\nThe data used is the first subject and first session of the one of the eeg-expy P300 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording. \n\nWe first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present \nin the specified data directory, they will be quickly downloaded from the cloud. 
\n\nAfter loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. \n\nThe final figure plotted at the end shows the P300 response ERP waveform. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport os\nfrom matplotlib import pyplot as plt\nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\n\n# MNE functions\nfrom mne import Epochs,find_events\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data,plot_conditions\nfrom eegnb.datasets import fetch_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load Data\n ---------------------\n\n We will use the eeg-expy N170 example dataset\n\n Note that if you are running this locally, the following cell will download\n the example dataset, if you do not already have it.\n\n##################################################################################################\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \np300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(p300_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples'); \n\n\nsubject = 1\nsession = 1\nraw = load_data(subject,session,\n experiment='visual-P300', site='eegnb_examples', device_name='muse2016',\n data_dir = eegnb_data_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize the power spectrum\n\n" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.plot_psd()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filteriing\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir')\nraw.plot_psd(fmin=1, fmax=30);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Create an array containing the timestamps and type of each stimulus (i.e. face or house)\nevents = find_events(raw)\nevent_id = {'non-target': 1, 'target': 2}\nepochs = Epochs(raw, events=events, event_id=event_id,\n tmin=-0.1, tmax=0.8, baseline=None, reject={'eeg': 100e-6}, preload=True, \n verbose=False, picks=[0,1,2,3])\n\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoch average\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "conditions = OrderedDict()\nconditions['non-target'] = ['non-target']\nconditions['target'] = ['target']\ndiffwav = [\"non-target\", \"target\"]\n\nfig, ax = plot_conditions(epochs, conditions=conditions, \n ci=97.5, n_boot=1000, title='',\n channel_order=[1,0,2,3],ylim=[-2E6,2.5E6],\n diff_waveform = diffwav)\n\n# Manually adjust the ylims\nfor i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6])\nfor i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6])\n\nplt.tight_layout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": 
"text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/b6763bc95c5e2ede1c4ce186cc0c606a/00x__p300_run_experiment.ipynb b/develop/_downloads/b6763bc95c5e2ede1c4ce186cc0c606a/00x__p300_run_experiment.ipynb new file mode 100644 index 00000000..1f2385d3 --- /dev/null +++ b/develop/_downloads/b6763bc95c5e2ede1c4ce186cc0c606a/00x__p300_run_experiment.ipynb @@ -0,0 +1,86 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# P300 run experiment\n\nThis example demonstrates the initiation of an EEG stream with eeg-expy, and how to run \nan experiment. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Setup\n--------------------- \n\nImports\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import os\nfrom eegnb import generate_save_fn\nfrom eegnb.devices.eeg import EEG\nfrom eegnb.experiments import VisualP300\n\n# Define some variables\nboard_name = \"muse2\"\nexperiment = \"visual_p300\"\nsubject_id = 0\nsession_nb = 0\nrecord_duration = 120" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initiate EEG device\n\nStart EEG device\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eeg_device = EEG(device=board_name)\n\n# Create save file name\nsave_fn = generate_save_fn(board_name, experiment, subject_id, session_nb)\nprint(save_fn)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run experiment\n--------------------- \n\nCreate Experiment Object\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "p300 = 
VisualP300(duration=record_duration, eeg=eeg_device, save_fn=save_fn)\np300.run()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/b6fdaf0dd3351d46675abacd38607890/00x__p300_run_experiment.py b/develop/_downloads/b6fdaf0dd3351d46675abacd38607890/00x__p300_run_experiment.py new file mode 100644 index 00000000..f5905360 --- /dev/null +++ b/develop/_downloads/b6fdaf0dd3351d46675abacd38607890/00x__p300_run_experiment.py @@ -0,0 +1,44 @@ +""" +P300 run experiment +=============================== + +This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment. 
+ +""" + +################################################################################################### +# Setup +# --------------------- +# +# Imports +import os +from eegnb import generate_save_fn +from eegnb.devices.eeg import EEG +from eegnb.experiments import VisualP300 + +# Define some variables +board_name = "muse2" +experiment = "visual_p300" +subject_id = 0 +session_nb = 0 +record_duration = 120 + +################################################################################################### +# Initiate EEG device +# --------------------- +# +# Start EEG device +eeg_device = EEG(device=board_name) + +# Create save file name +save_fn = generate_save_fn(board_name, experiment, subject_id, session_nb) +print(save_fn) + +################################################################################################### +# Run experiment +# --------------------- +# +# Create Experiment Object +p300 = VisualP300(duration=record_duration, eeg=eeg_device, save_fn=save_fn) +p300.run() diff --git a/develop/_downloads/bffc7389ff14934d31aa05a911f58bf0/01r__cueing_singlesub_analysis.ipynb b/develop/_downloads/bffc7389ff14934d31aa05a911f58bf0/01r__cueing_singlesub_analysis.ipynb new file mode 100644 index 00000000..9920fa77 --- /dev/null +++ b/develop/_downloads/bffc7389ff14934d31aa05a911f58bf0/01r__cueing_singlesub_analysis.ipynb @@ -0,0 +1,194 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Cueing Single Subject Analysis\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport os,numpy as np#,sys,glob,pandas as pd\nfrom collections import OrderedDict\nimport warnings\nwarnings.filterwarnings('ignore')\nfrom matplotlib import pyplot as plt\nimport matplotlib.patches as patches\n\n# MNE functions\nfrom mne 
import Epochs,find_events#, concatenate_raws\nfrom mne.time_frequency import tfr_morlet\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data,plot_conditions\nfrom eegnb.datasets import fetch_dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data\n\nWe will use the eeg-expy visual cueing example dataset\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \ncueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(cueing_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev');\n\n\nsub = 302\nsess = 1\nraw = load_data(sub,1, # subject, session\n experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',\n data_dir = eegnb_data_path)\n \nraw.append(\n load_data(sub,2, # subject, session\n experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',\n data_dir = eegnb_data_path))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize the power spectrum\n\nPlot raw data\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.plot();" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Power Spectral Density\n\nOne way to analyze the SSVEP is to plot the power spectral density, or PSD. SSVEPs should appear as peaks in power for certain frequencies. 
We expect clear peaks in the spectral domain at the stimulation frequencies of 30 and 20 Hz.\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.compute_psd().plot();\n\n# Should see the electrical noise at 60 Hz, and maybe a peak at the red and blue channels between 7-14 Hz (Alpha)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filtering\n\nMost ERP components are composed of lower frequency fluctuations in the EEG signal. Thus, we can filter out all frequencies between 1 and 30 hz in order to increase our ability to detect them.\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir');\nraw.compute_psd(fmin=1, fmax=30).plot();" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\nNext, we will chunk (epoch) the data into segments representing the data 1000ms before to 2000ms after each cue, we will reject every epoch where the amplitude of the signal exceeded 100 uV, which should most eye blinks.\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir')\nevents = find_events(raw)\nevent_id = {'LeftCue': 1, 'RightCue': 2}\n\nrej_thresh_uV = 150\nrej_thresh = rej_thresh_uV*1e-6\n\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-1, tmax=2, baseline=(-1, 0), \n reject={'eeg':rej_thresh}, preload=True,\n verbose=False, picks=[0, 1, 2, 3])\n\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\n\nconditions = OrderedDict()\n#conditions['LeftCue'] = [1]\n#conditions['RightCue'] = [2]\nconditions['LeftCue'] = ['LeftCue']\nconditions['RightCue'] = ['RightCue']\ndiffwave = ('LeftCue', 'RightCue')\n\nfig, ax = plot_conditions(epochs, 
conditions=conditions, \n ci=97.5, n_boot=1000, title='',\n diff_waveform=diffwave, ylim=(-20,20))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Spectrogram\n\nWe can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "frequencies = np.linspace(6, 30, 100, endpoint=True)\n\nwave_cycles = 6\n\n# Compute morlet wavelet\n\n# Left Cue\ntfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\ntfr = tfr.apply_baseline((-1,-.5),mode='mean')\ntfr.plot(picks=[0], mode='logratio', \n title='TP9 - Ipsi');\ntfr.plot(picks=[1], mode='logratio', \n title='TP10 - Contra');\npower_Ipsi_TP9 = tfr.data[0,:,:]\npower_Contra_TP10 = tfr.data[1,:,:]\n\n# Right Cue\ntfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, \n n_cycles=wave_cycles, return_itc=True)\ntfr = tfr.apply_baseline((-1,-.5),mode='mean')\ntfr.plot(picks=[0], mode='logratio', \n title='TP9 - Contra');\ntfr.plot(picks=[1], mode='logratio', \n title='TP10 - Ipsi');\npower_Contra_TP9 = tfr.data[0,:,:]\npower_Ipsi_TP10 = tfr.data[1,:,:]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Now we compute and plot the differences\n\ntime frequency window for analysis\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "f_low = 7 # Hz\nf_high = 10\nf_diff = f_high-f_low\n \nt_low = 0 # s\nt_high = 1\nt_diff = t_high-t_low\n\n# Plot Differences\ntimes = epochs.times\npower_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2;\npower_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;\npower_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;\n\n# find max to make color range\nplot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), 
np.max(np.abs(power_Avg_Contra))])\nplot_diff_max = np.max(np.abs(power_Avg_Diff))\n\n# Ipsi\nfig, ax = plt.subplots(1)\nim = plt.imshow(power_Avg_Ipsi,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Average Ipsilateral to Cue')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n\n#TP10\nfig, ax = plt.subplots(1)\nim = plt.imshow(power_Avg_Contra,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title(str(sub) + ' - Power Average Contra to Cue')\ncb = fig.colorbar(im)\ncb.set_label('Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n\n#difference between conditions\nfig, ax = plt.subplots(1)\nim = plt.imshow(power_Avg_Diff,\n extent=[times[0], times[-1], frequencies[0], frequencies[-1]],\n aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max)\nplt.xlabel('Time (sec)')\nplt.ylabel('Frequency (Hz)')\nplt.title('Power Difference Ipsi-Contra')\ncb = fig.colorbar(im)\ncb.set_label('Ipsi-Contra Power')\n# Create a Rectangle patch\nrect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')\n# Add the patch to the Axes\nax.add_patch(rect)\n\n\n# We expect greater alpha power ipsilateral to the cue direction (positive values) from 0 to 1.5 seconds" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Target Epoching\n\nNext, we will chunk (epoch) the data 
into segments representing the data .200ms before to 1000ms after each target, we will reject every epoch where the amplitude of the signal exceeded ? uV, which should most eye blinks.\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "events = find_events(raw)\nevent_id = {'InvalidTarget_Left': 11, 'InvalidTarget_Right': 12,\n 'ValidTarget_Left': 21,'ValidTarget_Right': 11}\n\nepochs = Epochs(raw, events=events, event_id=event_id, \n tmin=-.2, tmax=1, baseline=(-.2, 0), \n reject={'eeg':.0001}, preload=True,\n verbose=False, picks=[0, 1, 2, 3])\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\n\nconditions = OrderedDict()\nconditions['ValidTarget'] = ['ValidTarget_Left', 'ValidTarget_Right']\nconditions['InvalidTarget'] = ['InvalidTarget_Left', 'InvalidTarget_Right']\ndiffwave = ('ValidTarget', 'InvalidTarget')\n\nfig, ax = plot_conditions(epochs, conditions=conditions, \n ci=97.5, n_boot=1000, title='',\n diff_waveform=diffwave, ylim=(-20,20))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/c1612d0515ac035cc3602f0d2eb3810b/02r__p300_decoding.py b/develop/_downloads/c1612d0515ac035cc3602f0d2eb3810b/02r__p300_decoding.py new file mode 100644 index 00000000..e29db895 --- /dev/null +++ b/develop/_downloads/c1612d0515ac035cc3602f0d2eb3810b/02r__p300_decoding.py @@ -0,0 +1,124 @@ +""" +P300 Decoding +=============================== + +This example runs a set of machine learning algorithms on the P300 cats/dogs +dataset, 
and compares them in terms of classification performance. + +The data used is exactly the same as in the P300 `load_and_visualize` example. + +""" + +################################################################################################### +# Setup +# --------------------- + +# Some standard pythonic imports +import warnings +warnings.filterwarnings('ignore') +import os,numpy as np,pandas as pd +from collections import OrderedDict +import seaborn as sns +from matplotlib import pyplot as plt + +# MNE functions +from mne import Epochs,find_events +from mne.decoding import Vectorizer + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data +from eegnb.datasets import fetch_dataset + +# Scikit-learn and Pyriemann ML functionalities +from sklearn.pipeline import make_pipeline +from sklearn.linear_model import LogisticRegression +from sklearn.preprocessing import StandardScaler +from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA +from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit +from pyriemann.estimation import ERPCovariances, XdawnCovariances, Xdawn +from pyriemann.tangentspace import TangentSpace +from pyriemann.classification import MDM + +################################################################################################### +# Load Data +# --------------------- +# +# ( See the P300 `load_and_visualize` example for further description of this) +# + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +p300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(p300_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples') + + +subject = 1 +session = 1 +raw = load_data(subject,session, + experiment='visual-P300', site='eegnb_examples', device_name='muse2016', + data_dir = eegnb_data_path) + 
+################################################################################################### + +################################################################################################### +# Filteriing +# ---------------------------- + +raw.filter(1,30, method='iir') + +################################################################################################### +# Epoching +# ---------------------------- + +# Create an array containing the timestamps and type of each stimulus (i.e. face or house) +events = find_events(raw) +event_id = {'Non-Target': 1, 'Target': 2} +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.1, tmax=0.8, baseline=None, reject={'eeg': 100e-6}, preload=True, verbose=False, picks=[0,1,2,3]) + +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +epochs + +################################################################################################### +# Classfication +# ---------------------------- + +clfs = OrderedDict() +clfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression()) +clfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen')) +clfs['Xdawn + RegLDA'] = make_pipeline(Xdawn(2, classes=[1]), Vectorizer(), LDA(shrinkage='auto', solver='eigen')) + +clfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression()) +clfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM()) + + +clfs['ERPCov + TS'] = make_pipeline(ERPCovariances(), TangentSpace(), LogisticRegression()) +clfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(), MDM()) + +# format data +epochs.pick_types(eeg=True) +X = epochs.get_data() * 1e6 +times = epochs.times +y = epochs.events[:, -1] + +# define cross validation +cv = StratifiedShuffleSplit(n_splits=10, test_size=0.25, random_state=42) + +# run cross validation for each pipeline +auc = [] +methods = [] +for m in clfs: + res = 
cross_val_score(clfs[m], X, y==2, scoring='roc_auc', cv=cv, n_jobs=-1) + auc.extend(res) + methods.extend([m]*len(res)) + +results = pd.DataFrame(data=auc, columns=['AUC']) +results['Method'] = methods + +plt.figure(figsize=[8,4]) +sns.barplot(data=results, x='AUC', y='Method') +plt.xlim(0.2, 0.85) +sns.despine() diff --git a/develop/_downloads/c8168bec3b40b12111ac87ce4dfeac13/01r__cueing_singlesub_analysis.py b/develop/_downloads/c8168bec3b40b12111ac87ce4dfeac13/01r__cueing_singlesub_analysis.py new file mode 100644 index 00000000..9aaabdbf --- /dev/null +++ b/develop/_downloads/c8168bec3b40b12111ac87ce4dfeac13/01r__cueing_singlesub_analysis.py @@ -0,0 +1,255 @@ +""" +Cueing Single Subject Analysis +=============================== + + + + + +""" + +################################################################################################### +# Setup +# --------------------- +# + +# Some standard pythonic imports +import os,numpy as np#,sys,glob,pandas as pd +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') +from matplotlib import pyplot as plt +import matplotlib.patches as patches + +# MNE functions +from mne import Epochs,find_events#, concatenate_raws +from mne.time_frequency import tfr_morlet + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data,plot_conditions +from eegnb.datasets import fetch_dataset + +# sphinx_gallery_thumbnail_number = 1 + +################################################################################################### +# Load Data +# --------------------- +# +# We will use the eeg-expy visual cueing example dataset +# + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(cueing_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', 
site='kylemathlab_dev'); + + +sub = 302 +sess = 1 +raw = load_data(sub,1, # subject, session + experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016', + data_dir = eegnb_data_path) + +raw.append( + load_data(sub,2, # subject, session + experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016', + data_dir = eegnb_data_path)) + +################################################################################################### +# Visualize the power spectrum +# ---------------------------- +# +# Plot raw data +# + +raw.plot(); + +################################################################################################### +# Power Spectral Density +# ----------------------------- +# +# One way to analyze the SSVEP is to plot the power spectral density, or PSD. SSVEPs should appear as peaks in power for certain frequencies. We expect clear peaks in the spectral domain at the stimulation frequencies of 30 and 20 Hz. +# + +raw.compute_psd().plot(); + +# Should see the electrical noise at 60 Hz, and maybe a peak at the red and blue channels between 7-14 Hz (Alpha) + +################################################################################################### +# Filtering +# ----------------------------- +# +# Most ERP components are composed of lower frequency fluctuations in the EEG signal. Thus, we can filter out all frequencies between 1 and 30 hz in order to increase our ability to detect them. +# + +raw.filter(1,30, method='iir'); +raw.compute_psd(fmin=1, fmax=30).plot(); + +################################################################################################### +# Epoching +# ----------------------------- +# +# Next, we will chunk (epoch) the data into segments representing the data 1000ms before to 2000ms after each cue, we will reject every epoch where the amplitude of the signal exceeded 100 uV, which should most eye blinks. 
+# + +raw.filter(1,30, method='iir') +events = find_events(raw) +event_id = {'LeftCue': 1, 'RightCue': 2} + +rej_thresh_uV = 150 +rej_thresh = rej_thresh_uV*1e-6 + +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-1, tmax=2, baseline=(-1, 0), + reject={'eeg':rej_thresh}, preload=True, + verbose=False, picks=[0, 1, 2, 3]) + +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +conditions = OrderedDict() +#conditions['LeftCue'] = [1] +#conditions['RightCue'] = [2] +conditions['LeftCue'] = ['LeftCue'] +conditions['RightCue'] = ['RightCue'] +diffwave = ('LeftCue', 'RightCue') + +fig, ax = plot_conditions(epochs, conditions=conditions, + ci=97.5, n_boot=1000, title='', + diff_waveform=diffwave, ylim=(-20,20)) + +################################################################################################### +# Spectrogram +# ----------------------------- +# +# We can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time +# + +frequencies = np.linspace(6, 30, 100, endpoint=True) + +wave_cycles = 6 + +# Compute morlet wavelet + +# Left Cue +tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) +tfr = tfr.apply_baseline((-1,-.5),mode='mean') +tfr.plot(picks=[0], mode='logratio', + title='TP9 - Ipsi'); +tfr.plot(picks=[1], mode='logratio', + title='TP10 - Contra'); +power_Ipsi_TP9 = tfr.data[0,:,:] +power_Contra_TP10 = tfr.data[1,:,:] + +# Right Cue +tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) +tfr = tfr.apply_baseline((-1,-.5),mode='mean') +tfr.plot(picks=[0], mode='logratio', + title='TP9 - Contra'); +tfr.plot(picks=[1], mode='logratio', + title='TP10 - Ipsi'); +power_Contra_TP9 = tfr.data[0,:,:] +power_Ipsi_TP10 = tfr.data[1,:,:] + + +################################################################################################### +# Now we compute and plot the 
differences +# ----------------------------- +# +# time frequency window for analysis + +f_low = 7 # Hz +f_high = 10 +f_diff = f_high-f_low + +t_low = 0 # s +t_high = 1 +t_diff = t_high-t_low + +# Plot Differences +times = epochs.times +power_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2; +power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2; +power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra; + +# find max to make color range +plot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), np.max(np.abs(power_Avg_Contra))]) +plot_diff_max = np.max(np.abs(power_Avg_Diff)) + +# Ipsi +fig, ax = plt.subplots(1) +im = plt.imshow(power_Avg_Ipsi, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Average Ipsilateral to Cue') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +#TP10 +fig, ax = plt.subplots(1) +im = plt.imshow(power_Avg_Contra, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title(str(sub) + ' - Power Average Contra to Cue') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +#difference between conditions +fig, ax = plt.subplots(1) +im = plt.imshow(power_Avg_Diff, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Difference 
Ipsi-Contra') +cb = fig.colorbar(im) +cb.set_label('Ipsi-Contra Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + + +# We expect greater alpha power ipsilateral to the cue direction (positive values) from 0 to 1.5 seconds + +################################################################################################### +# Target Epoching +# ----------------------------- +# +# Next, we will chunk (epoch) the data into segments representing the data .200ms before to 1000ms after each target, we will reject every epoch where the amplitude of the signal exceeded ? uV, which should most eye blinks. +# + +events = find_events(raw) +event_id = {'InvalidTarget_Left': 11, 'InvalidTarget_Right': 12, + 'ValidTarget_Left': 21,'ValidTarget_Right': 11} + +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-.2, tmax=1, baseline=(-.2, 0), + reject={'eeg':.0001}, preload=True, + verbose=False, picks=[0, 1, 2, 3]) +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +conditions = OrderedDict() +conditions['ValidTarget'] = ['ValidTarget_Left', 'ValidTarget_Right'] +conditions['InvalidTarget'] = ['InvalidTarget_Left', 'InvalidTarget_Right'] +diffwave = ('ValidTarget', 'InvalidTarget') + +fig, ax = plot_conditions(epochs, conditions=conditions, + ci=97.5, n_boot=1000, title='', + diff_waveform=diffwave, ylim=(-20,20)) + diff --git a/develop/_downloads/c9435ee669e38dd54bad678a26f2c8c1/00x__ssvep_run_experiment.ipynb b/develop/_downloads/c9435ee669e38dd54bad678a26f2c8c1/00x__ssvep_run_experiment.ipynb new file mode 100644 index 00000000..1a31750d --- /dev/null +++ b/develop/_downloads/c9435ee669e38dd54bad678a26f2c8c1/00x__ssvep_run_experiment.ipynb @@ -0,0 +1,86 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# SSVEP run experiment\n\nThis example demonstrates the initiation of an 
EEG stream with eeg-expy, and how to run \nan experiment. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Setup\n--------------------- \n\nImports\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import os\nfrom eegnb import generate_save_fn\nfrom eegnb.devices.eeg import EEG\nfrom eegnb.experiments import VisualSSVEP\n\n# Define some variables\nboard_name = \"muse2\"\nexperiment = \"visual_ssvep\"\nsubject_id = 0\nsession_nb = 0\nrecord_duration = 120" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initiate EEG device\n\nStart EEG device\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eeg_device = EEG(device=board_name)\n\n# Create save file name\nsave_fn = generate_save_fn(board_name, experiment, subject_id, session_nb)\nprint(save_fn)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Run experiment\n--------------------- \n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ssvep = VisualSSVEP(duration=record_duration, eeg=eeg_device, save_fn=save_fn)\nssvep.run()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/d57fff739827ca575034e03b51d68a54/02r__cueing_group_analysis.py b/develop/_downloads/d57fff739827ca575034e03b51d68a54/02r__cueing_group_analysis.py new file 
mode 100644 index 00000000..36bc0872 --- /dev/null +++ b/develop/_downloads/d57fff739827ca575034e03b51d68a54/02r__cueing_group_analysis.py @@ -0,0 +1,374 @@ +""" +Cueing Group Analysis +=============================== + +""" + +################################################################################################### +# Setup +# --------------------- + +# Standard pythonic importa +import os,sys,glob,numpy as np,pandas as pd +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') +from matplotlib import pyplot as plt +import matplotlib.patches as patches + +# MNE functions +from mne import Epochs, find_events, concatenate_raws +from mne.time_frequency import tfr_morlet + +# EEG-Notebooks functions +from eegnb.datasets import datasets +from eegnb.analysis.analysis_utils import load_data + +# sphinx_gallery_thumbnail_number = 1 + +################################################################################################### +# Download the data +# ----------------------------- + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(cueing_data_path): + datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev') + +################################################################################################### +# Load data into MNE objects +# ----------------------------- + +# MNE is a very powerful Python library for analyzing EEG data. It provides helpful functions for performing key tasks such as filtering EEG data, rejecting artifacts, and grouping EEG data into chunks (epochs). 
+ +# The first step after loading dependencies is use MNE to read the data we've collected into an MNE Raw object + +#################################################################################################### + +subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112, + 202, 203, 204, 205, 207, 208, 209, 210, 211, + 301, 302, 303, 304, 305, 306, 307, 308, 309] + +diff_out = [] +Ipsi_out = [] +Contra_out = [] +Ipsi_spectra_out = [] +Contra_spectra_out = [] +diff_spectra_out = [] +ERSP_diff_out = [] +ERSP_Ipsi_out = [] +ERSP_Contra_out = [] + +frequencies = np.linspace(6, 30, 100, endpoint=True) +wave_cycles = 6 + +# time frequency window for analysis +f_low = 7 # Hz +f_high = 10 +f_diff = f_high-f_low + +t_low = 0 # s +t_high = 1 +t_diff = t_high-t_low + +bad_subs= [6, 7, 13, 26] +really_bad_subs = [11, 12, 19] +sub_count = 0 + + + +for sub in subs: + print(sub) + + sub_count += 1 + + + if (sub_count in really_bad_subs): + rej_thresh_uV = 90 + elif (sub_count in bad_subs): + rej_thresh_uV = 90 + else: + rej_thresh_uV = 90 + + rej_thresh = rej_thresh_uV*1e-6 + + + # Load both sessions + raw = load_data(sub,1, # subject, session + experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016', + data_dir = eegnb_data_path) + + raw.append( + load_data(sub,2, # subject, session + experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016', + data_dir = eegnb_data_path)) + + + # Filter Raw Data + raw.filter(1,30, method='iir') + + #Select Events + events = find_events(raw) + event_id = {'LeftCue': 1, 'RightCue': 2} + epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-1, tmax=2, baseline=(-1, 0), + reject={'eeg':rej_thresh}, preload=True, + verbose=False, picks=[0, 3]) + print('Trials Remaining: ' + str(len(epochs.events)) + '.') + + # Compute morlet wavelet + + # Left Cue + tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) + tfr = 
tfr.apply_baseline((-1,-.5),mode='mean') + #tfr.plot(picks=[0], mode='logratio', + # title='TP9 - Ipsi'); + #tfr.plot(picks=[3], mode='logratio', + # title='TP10 - Contra'); + power_Ipsi_TP9 = tfr.data[0,:,:] + power_Contra_TP10 = tfr.data[1,:,:] + + # Right Cue + tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies, + n_cycles=wave_cycles, return_itc=True) + tfr = tfr.apply_baseline((-1,-.5),mode='mean') + #tfr.plot(picks=[0], mode='logratio', + # title='TP9 - Contra'); + #tfr.plot(picks=[3], mode='logratio', + # title='TP10 - Ipsi'); + power_Contra_TP9 = tfr.data[0,:,:] + power_Ipsi_TP10 = tfr.data[1,:,:] + + # Plot Differences + #%matplotlib inline + times = epochs.times + power_Avg_Ipsi = (power_Ipsi_TP9+power_Ipsi_TP10)/2; + power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2; + power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra; + + + #find max to make color range + plot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), np.max(np.abs(power_Avg_Contra))]) + plot_diff_max = np.max(np.abs(power_Avg_Diff)) + + + + #Ipsi + fig, ax = plt.subplots(1) + im = plt.imshow(power_Avg_Ipsi, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) + plt.xlabel('Time (sec)') + plt.ylabel('Frequency (Hz)') + plt.title('Power Average Ipsilateral to Cue') + cb = fig.colorbar(im) + cb.set_label('Power') + # Create a Rectangle patch + rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') + # Add the patch to the Axes + ax.add_patch(rect) + + #TP10 + fig, ax = plt.subplots(1) + im = plt.imshow(power_Avg_Contra, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) + plt.xlabel('Time (sec)') + plt.ylabel('Frequency (Hz)') + plt.title(str(sub) + ' - Power Average Contra to Cue') + cb = fig.colorbar(im) + cb.set_label('Power') + # Create a Rectangle patch + 
rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') + # Add the patch to the Axes + ax.add_patch(rect) + + #difference between conditions + fig, ax = plt.subplots(1) + im = plt.imshow(power_Avg_Diff, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max) + plt.xlabel('Time (sec)') + plt.ylabel('Frequency (Hz)') + plt.title('Power Difference Ipsi-Contra') + cb = fig.colorbar(im) + cb.set_label('Ipsi-Contra Power') + # Create a Rectangle patch + rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') + # Add the patch to the Axes + ax.add_patch(rect) + + + + + #output data into array + Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1 + ) + ) + + Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + + Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1)) + + + diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low): + np.argmax(frequencies>f_high)-1, + np.argmax(times>t_low):np.argmax(times>t_high)-1 ] + ) + ) + diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low): + np.argmax(times>t_high)-1 ],1 + ) + ) + + + ERSP_diff_out.append(power_Avg_Diff) + ERSP_Ipsi_out.append(power_Avg_Ipsi) + ERSP_Contra_out.append(power_Avg_Contra) + + + +print(np.shape(ERSP_diff_out)) +print(np.shape(Contra_spectra_out)) + +print(diff_out) + + + +################################################################################################### +# Combine subjects +# 
---------------------------- + +GrandAvg_diff = np.nanmean(ERSP_diff_out,0) +GrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0) +GrandAvg_Contra = np.nanmean(ERSP_Contra_out,0) + +GrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0) +GrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0) +GrandAvg_spec_diff = np.nanmean(diff_spectra_out,0) + +num_good = len(diff_out) - sum(np.isnan(diff_out)) +GrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good) +GrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good) +GrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good) + +#Spectra error bars +fig, ax = plt.subplots(1) +plt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste) +plt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste) + +plt.legend(('Ipsi','Contra')) +plt.xlabel('Frequency (Hz)') +plt.ylabel('Power (uV^2)') +plt.hlines(0,3,33) + +#Spectra Diff error bars +fig, ax = plt.subplots(1) +plt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste) + +plt.legend('Ipsi-Contra') +plt.xlabel('Frequency (Hz)') +plt.ylabel('Power (uV^2)') +plt.hlines(0,3,33) + +#Grand Average Ipsi +plot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))]) +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_Ipsi, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Ipsi') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +#Grand Average Contra +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_Contra, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, 
vmax=plot_max) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Contra') +cb = fig.colorbar(im) +cb.set_label('Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +#Grand Average Ipsi-Contra Difference +plot_max_diff = np.max(np.abs(GrandAvg_diff)) +fig, ax = plt.subplots(1) +im = plt.imshow(GrandAvg_diff, + extent=[times[0], times[-1], frequencies[0], frequencies[-1]], + aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff) +plt.xlabel('Time (sec)') +plt.ylabel('Frequency (Hz)') +plt.title('Power Difference Ipsi-Contra') +cb = fig.colorbar(im) +cb.set_label('Ipsi-Contra Power') +# Create a Rectangle patch +rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none') +# Add the patch to the Axes +ax.add_patch(rect) + +################################################################################################### +# Compute t test +# ---------------------------- + +import scipy +num_good = len(diff_out) - sum(np.isnan(diff_out)) + +[tstat, pval] = scipy.stats.ttest_ind(diff_out,np.zeros(len(diff_out)),nan_policy='omit') +print('Ipsi Mean: '+ str(np.nanmean(Ipsi_out))) +print('Contra Mean: '+ str(np.nanmean(Contra_out))) +print('Mean Diff: '+ str(np.nanmean(diff_out))) +print('t(' + str(num_good-1) + ') = ' + str(round(tstat,3))) +print('p = ' + str(round(pval,3))) + +################################################################################################### +# Save average powers ipsi and contra +# ---------------------------- + +import pandas as pd +print(diff_out) +raw_data = {'Ipsi Power': Ipsi_out, + 'Contra Power': Contra_out} +df = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power']) +df.to_csv('375CueingEEG.csv') +print('Saved subject averages for each condition to 375CueingEEG.csv file in present directory') + 
+ +################################################################################################### +# Save spectra +# ---------------------------- + +df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies) +df.to_csv('375CueingIpsiSpec.csv') + +df = pd.DataFrame(Contra_spectra_out,columns=frequencies) +df.to_csv('375CueingContraSpec.csv') +print('Saved Spectra to 375Cueing*Spec.csv file in present directory') + + + diff --git a/develop/_downloads/d5962f611a1c0434100c2f3706093684/02r__n170_decoding.py b/develop/_downloads/d5962f611a1c0434100c2f3706093684/02r__n170_decoding.py new file mode 100644 index 00000000..7e154290 --- /dev/null +++ b/develop/_downloads/d5962f611a1c0434100c2f3706093684/02r__n170_decoding.py @@ -0,0 +1,135 @@ +""" +N170 Decoding +=============================== + +This example runs a set of machine learning algorithms on the N170 faces/houses +dataset, and compares them in terms of classification performance. + +The data used is exactly the same as in the N170 `load_and_visualize` example. 
+ +""" + +################################################################################################### +# Setup +# --------------------- + +# Some standard pythonic imports +import warnings +warnings.filterwarnings('ignore') +import os,numpy as np,pandas as pd +from collections import OrderedDict +import seaborn as sns +from matplotlib import pyplot as plt + +# MNE functions +from mne import Epochs,find_events +from mne.decoding import Vectorizer + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data +from eegnb.datasets import fetch_dataset + +# Scikit-learn and Pyriemann ML functionalities +from sklearn.pipeline import make_pipeline +from sklearn.linear_model import LogisticRegression +from sklearn.preprocessing import StandardScaler +from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA +from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit +from pyriemann.estimation import ERPCovariances, XdawnCovariances +from pyriemann.tangentspace import TangentSpace +from pyriemann.classification import MDM + + +################################################################################################### +# Load Data +# --------------------- +# +# ( See the n170 `load_and_visualize` example for further description of this) +# + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +n170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(n170_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples') + +subject = 1 +session = 1 +raw = load_data(subject,session, + experiment='visual-N170', site='eegnb_examples', device_name='muse2016', + data_dir = eegnb_data_path) + +################################################################################################### + 
+################################################################################################### +# Filteriing +# ---------------------------- + +raw.filter(1,30, method='iir') + +################################################################################################### +# Epoching +# ---------------------------- + +# Create an array containing the timestamps and type of each stimulus (i.e. face or house) +events = find_events(raw) +event_id = {'House': 1, 'Face': 2} + +# Create an MNE Epochs object representing all the epochs around stimulus presentation +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.1, tmax=0.8, baseline=None, + reject={'eeg': 75e-6}, preload=True, + verbose=False, picks=[0,1,2,3]) + +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) +epochs + +################################################################################################### +# Run classification +# ---------------------------- + +clfs = OrderedDict() +clfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression()) +clfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen')) +clfs['ERPCov + TS'] = make_pipeline(ERPCovariances(estimator='oas'), TangentSpace(), LogisticRegression()) +clfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(estimator='oas'), MDM()) +clfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression()) +clfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM()) + +# format data +epochs.pick_types(eeg=True) +X = epochs.get_data() * 1e6 +times = epochs.times +y = epochs.events[:, -1] + +# define cross validation +cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25, + random_state=42) + +# run cross validation for each pipeline +auc = [] +methods = [] +for m in clfs: + print(m) + try: + res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc', + cv=cv, n_jobs=-1) + auc.extend(res) + 
methods.extend([m]*len(res)) + except: + pass + +################################################################################################### +# Plot Decoding Results +# ---------------------------- + +results = pd.DataFrame(data=auc, columns=['AUC']) +results['Method'] = methods + +fig = plt.figure(figsize=[8,4]) +sns.barplot(data=results, x='AUC', y='Method') +plt.xlim(0.4, 0.9) +sns.despine() + + diff --git a/develop/_downloads/d8a61e599ef859175e65b658c0800a68/visual_ssvep_python.zip b/develop/_downloads/d8a61e599ef859175e65b658c0800a68/visual_ssvep_python.zip new file mode 100644 index 00000000..e90f2696 Binary files /dev/null and b/develop/_downloads/d8a61e599ef859175e65b658c0800a68/visual_ssvep_python.zip differ diff --git a/develop/_downloads/d8e5dee04f613448d49984a260dabc0b/visual_p300_jupyter.zip b/develop/_downloads/d8e5dee04f613448d49984a260dabc0b/visual_p300_jupyter.zip new file mode 100644 index 00000000..eef1e937 Binary files /dev/null and b/develop/_downloads/d8e5dee04f613448d49984a260dabc0b/visual_p300_jupyter.zip differ diff --git a/develop/_downloads/db47b930ce10faf6f74a13d608925887/02r__p300_decoding.ipynb b/develop/_downloads/db47b930ce10faf6f74a13d608925887/02r__p300_decoding.ipynb new file mode 100644 index 00000000..2cc7da11 --- /dev/null +++ b/develop/_downloads/db47b930ce10faf6f74a13d608925887/02r__p300_decoding.ipynb @@ -0,0 +1,122 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# P300 Decoding\n\nThis example runs a set of machine learning algorithms on the P300 cats/dogs\ndataset, and compares them in terms of classification performance. \n\nThe data used is exactly the same as in the P300 `load_and_visualize` example. 
\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Some standard pythonic imports\nimport warnings\nwarnings.filterwarnings('ignore')\nimport os,numpy as np,pandas as pd\nfrom collections import OrderedDict\nimport seaborn as sns\nfrom matplotlib import pyplot as plt\n\n# MNE functions\nfrom mne import Epochs,find_events\nfrom mne.decoding import Vectorizer\n\n# EEG-Notebooks functions\nfrom eegnb.analysis.analysis_utils import load_data\nfrom eegnb.datasets import fetch_dataset\n\n# Scikit-learn and Pyriemann ML functionalities\nfrom sklearn.pipeline import make_pipeline\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.preprocessing import StandardScaler\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\nfrom sklearn.model_selection import cross_val_score, StratifiedShuffleSplit\nfrom pyriemann.estimation import ERPCovariances, XdawnCovariances, Xdawn\nfrom pyriemann.tangentspace import TangentSpace\nfrom pyriemann.classification import MDM" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Data\n\n( See the P300 `load_and_visualize` example for further description of this)\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') \np300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples')\n\n# If dataset hasn't been downloaded yet, download it \nif not os.path.isdir(p300_data_path):\n fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples') \n\n\nsubject = 1\nsession = 1\nraw = load_data(subject,session,\n experiment='visual-P300', site='eegnb_examples', device_name='muse2016',\n data_dir = 
eegnb_data_path)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Filteriing\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "raw.filter(1,30, method='iir')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Epoching\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Create an array containing the timestamps and type of each stimulus (i.e. face or house)\nevents = find_events(raw)\nevent_id = {'Non-Target': 1, 'Target': 2}\nepochs = Epochs(raw, events=events, event_id=event_id,\n tmin=-0.1, tmax=0.8, baseline=None, reject={'eeg': 100e-6}, preload=True, verbose=False, picks=[0,1,2,3])\n\nprint('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\n\nepochs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Classfication\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "clfs = OrderedDict()\nclfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression())\nclfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen'))\nclfs['Xdawn + RegLDA'] = make_pipeline(Xdawn(2, classes=[1]), Vectorizer(), LDA(shrinkage='auto', solver='eigen'))\n\nclfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression())\nclfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM())\n\n\nclfs['ERPCov + TS'] = make_pipeline(ERPCovariances(), TangentSpace(), LogisticRegression())\nclfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(), MDM())\n\n# format data\nepochs.pick_types(eeg=True)\nX = epochs.get_data() * 1e6\ntimes = epochs.times\ny = epochs.events[:, -1]\n\n# define cross validation \ncv = 
StratifiedShuffleSplit(n_splits=10, test_size=0.25, random_state=42)\n\n# run cross validation for each pipeline\nauc = []\nmethods = []\nfor m in clfs:\n res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc', cv=cv, n_jobs=-1)\n auc.extend(res)\n methods.extend([m]*len(res))\n \nresults = pd.DataFrame(data=auc, columns=['AUC'])\nresults['Method'] = methods\n\nplt.figure(figsize=[8,4])\nsns.barplot(data=results, x='AUC', y='Method')\nplt.xlim(0.2, 0.85)\nsns.despine()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_downloads/db7df1055da643ad96640e6377001dbf/01r__n170_viz.py b/develop/_downloads/db7df1055da643ad96640e6377001dbf/01r__n170_viz.py new file mode 100644 index 00000000..52556e8a --- /dev/null +++ b/develop/_downloads/db7df1055da643ad96640e6377001dbf/01r__n170_viz.py @@ -0,0 +1,116 @@ +""" +N170 Load and Visualize Data +=============================== + +This example demonstrates loading, organizing, and visualizing ERP response data from the visual N170 experiment. + +Images of faces and houses are shown in a rapid serial visual presentation (RSVP) stream. + +The data used is the first subject and first session of the one of the eeg-expy N170 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). +This session consists of six two-minute blocks of continuous recording. + +We first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud. 
+ +After loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. + +The final figure plotted at the end shows the N170 response ERP waveform. + +""" + +################################################################################################### +# Setup +# --------------------- + +# Some standard pythonic imports +import os +from matplotlib import pyplot as plt +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') + +# MNE functions +from mne import Epochs,find_events + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data,plot_conditions +from eegnb.datasets import fetch_dataset + +# sphinx_gallery_thumbnail_number = 3 + +################################################################################################### +# Load Data +# --------------------- +# +# We will use the eeg-expy N170 example dataset +# +# Note that if you are running this locally, the following cell will download +# the example dataset, if you do not already have it. 
+# + +################################################################################################### + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +n170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(n170_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples'); + +subject = 1 +session = 1 +raw = load_data(subject,session, + experiment='visual-N170', site='eegnb_examples', device_name='muse2016_bfn', + data_dir = eegnb_data_path) + +################################################################################################### +# Visualize the power spectrum +# ---------------------------- + +raw.plot_psd() + +################################################################################################### +# Filtering +# ---------------------------- + +raw.filter(1,30, method='iir') +raw.plot_psd(fmin=1, fmax=30); + +################################################################################################### +# Epoching +# ---------------------------- + +# Create an array containing the timestamps and type of each stimulus (i.e. 
face or house) +events = find_events(raw) +event_id = {'House': 1, 'Face': 2} + +# Create an MNE Epochs object representing all the epochs around stimulus presentation +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.1, tmax=0.6, baseline=None, + reject={'eeg': 5e-5}, preload=True, + verbose=False, picks=[0,1,2,3]) +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) +epochs + +################################################################################################### +# Epoch average +# ---------------------------- + +conditions = OrderedDict() +#conditions['House'] = [1] +#conditions['Face'] = [2] +conditions['House'] = ['House'] +conditions['Face'] = ['Face'] +diffwav = ('Face', 'House') + +fig, ax = plot_conditions(epochs, conditions=conditions, + ci=97.5, n_boot=1000, title='', + diff_waveform=diffwav, + channel_order=[1,0,2,3]) +# reordering of epochs.ch_names according to [[0,2],[1,3]] of subplot axes + +# Manually adjust the ylims +for i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6]) +for i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6]) +plt.tight_layout() + diff --git a/develop/_downloads/e500d6e9072380c7b8d43c4a40d8e4f1/01r__p300_viz.py b/develop/_downloads/e500d6e9072380c7b8d43c4a40d8e4f1/01r__p300_viz.py new file mode 100644 index 00000000..138f20d6 --- /dev/null +++ b/develop/_downloads/e500d6e9072380c7b8d43c4a40d8e4f1/01r__p300_viz.py @@ -0,0 +1,110 @@ +""" +P300 Load and Visualize Data +=============================== + +This example demonstrates loading, organizing, and visualizing ERP response data from the visual P300 experiment. The experiment uses a visual oddball paradigm. Images of cats and dogs are shwn in a rapid serial visual presentation (RSVP) stream, with cats and dogs categorized respectively as 'targets' or 'non-targets', according to which has high or low probability of occurring, respectively. 
+ +The data used is the first subject and first session of the one of the eeg-expy P300 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording. + +We first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud. + +After loading the data, we place it in an MNE `Epochs` object, and obtain the trial-averaged response. + +The final figure plotted at the end shows the P300 response ERP waveform. + +""" + +################################################################################################### +# Setup +# --------------------- + +# Some standard pythonic imports +import os +from matplotlib import pyplot as plt +from collections import OrderedDict +import warnings +warnings.filterwarnings('ignore') + +# MNE functions +from mne import Epochs,find_events + +# EEG-Notebooks functions +from eegnb.analysis.analysis_utils import load_data,plot_conditions +from eegnb.datasets import fetch_dataset + +# sphinx_gallery_thumbnail_number = 3 + +################################################################################################### +# Load Data +# --------------------- +# +# We will use the eeg-expy N170 example dataset +# +# Note that if you are running this locally, the following cell will download +# the example dataset, if you do not already have it. 
+# +################################################################################################### + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data') +p300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(p300_data_path): + fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples'); + + +subject = 1 +session = 1 +raw = load_data(subject,session, + experiment='visual-P300', site='eegnb_examples', device_name='muse2016', + data_dir = eegnb_data_path) + +################################################################################################### + +################################################################################################### +# Visualize the power spectrum +# ---------------------------- + +raw.plot_psd() + +################################################################################################### +# Filteriing +# ---------------------------- + +raw.filter(1,30, method='iir') +raw.plot_psd(fmin=1, fmax=30); + +################################################################################################### +# Epoching +# ---------------------------- + +# Create an array containing the timestamps and type of each stimulus (i.e. 
face or house) +events = find_events(raw) +event_id = {'non-target': 1, 'target': 2} +epochs = Epochs(raw, events=events, event_id=event_id, + tmin=-0.1, tmax=0.8, baseline=None, reject={'eeg': 100e-6}, preload=True, + verbose=False, picks=[0,1,2,3]) + +print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100) + +################################################################################################### +# Epoch average +# ---------------------------- + +conditions = OrderedDict() +conditions['non-target'] = ['non-target'] +conditions['target'] = ['target'] +diffwav = ["non-target", "target"] + +fig, ax = plot_conditions(epochs, conditions=conditions, + ci=97.5, n_boot=1000, title='', + channel_order=[1,0,2,3],ylim=[-2E6,2.5E6], + diff_waveform = diffwav) + +# Manually adjust the ylims +for i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6]) +for i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6]) + +plt.tight_layout() + diff --git a/develop/_downloads/e9b19063c25fc9c3a038b9f1dad5d806/03r__cueing_behaviour_analysis_winter2019.py b/develop/_downloads/e9b19063c25fc9c3a038b9f1dad5d806/03r__cueing_behaviour_analysis_winter2019.py new file mode 100644 index 00000000..54c98ff9 --- /dev/null +++ b/develop/_downloads/e9b19063c25fc9c3a038b9f1dad5d806/03r__cueing_behaviour_analysis_winter2019.py @@ -0,0 +1,216 @@ +""" +Cueing Behavioural Analysis Winter 2019 +======================================= + +""" + + +################################################################################################### +# Setup +# --------------------- +# + +# Standard Pythonic imports +import os,sys,glob,numpy as np,pandas as pd +import matplotlib.pyplot as plt +import scipy.io as sio + +# EEG-Notebooks imports +from eegnb.datasets import datasets + +# sphinx_gallery_thumbnail_number = 1 + +################################################################################################### +# Download the data + +eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 
'data') +cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev') + +# If dataset hasn't been downloaded yet, download it +if not os.path.isdir(cueing_data_path): + datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev') + + + +################################################################################################### +# Analyze .mat behavioural data for Accuracy and RT +# ----------------------------- +# +# Load in subjects + +# # Fall 2018 +subs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112, + 202, 203, 204, 205, 207, 208, 209, 210, 211, + 301, 302, 303, 304, 305, 306, 307, 308, 309] + +# 105 - no trials in one condition + +# # Winter 2019 +# subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110, +# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215, +# 1301, 1302, 1313, +# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416] + +# # 1107 - no csv session 1 +# # 1201 - no csv session 1 +# # 1304 - Muse 2 +# # 1308 - Muse 2 +# # 1311 - Muse 2 +# # 1314 - Muse 2 +# # 1407 - only session1 + +# Both + +# Fall 2018 +# subs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112, +# 202, 203, 204, 205, 207, 208, 209, 210, 211, +# 301, 302, 303, 304, 305, 306, 307, 308, 309, +# 1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110, +# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215, +# 1301, 1302, 1313, +# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416] + +################################################################################################### +# Set some settings + + +# basic numbers +n_subs = len(subs) +n_sesh = 2 +conditions = ['valid','invalid'] +n_cond = len(conditions) + +# cutoff trials that are too slow or fast +rt_toofast = 250 +rt_tooslow = 1500 + +#creates arrays to save output +count_rt = np.zeros((n_subs, n_sesh, n_cond)) +median_rt = np.zeros((n_subs, n_sesh, n_cond)) +prop_accu = np.zeros((n_subs, 
n_sesh, n_cond)) + + +################################################################################################### +# Single Subject example + +#select single subject +sub = subs[0] +print('Subject - ' + str(sub)) + +#just one session +sesh = 1 + +#load file +#path = './subject' + str(sub) + '/session' + str(sesh) + '/' +path = cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/' +file = [x for x in os.listdir(path) if x.endswith('.mat')][0] +output_dict = sio.loadmat(path + file) +print(path + file) + +#pull out important info +output = output_dict['output'] +accuracy = output[:,6] +rt = output[:,7] +validity = output[:,3] +print(accuracy,rt,validity) + +# median rt on each condition +print('') +print(rt) +print(rt[validity == 0]) +print(rt[(validity == 0) & (rt <= rt_tooslow)]) + +validRT = np.nanmedian(rt[ (validity == 1) & + (rt >= rt_toofast) & + (rt <= rt_tooslow)]) + +print('Valid RT = ' + str(validRT) + ' ms') + +InvalidRT = np.nanmedian(rt[ (validity == 0) & + (rt >= rt_toofast) & + (rt <= rt_tooslow)]) + +print('Invalid RT = ' + str(InvalidRT) + ' ms') + + +################################################################################################### +# Loop through subjects + +for isub, sub in enumerate(subs): + print('Subject - ' + str(sub)) + for sesh in range(n_sesh): + # get the path and file name and load data + #path = './subject' + str(sub) + '/session' + str(sesh+1) + '/' + path = cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/' + + file = [x for x in os.listdir(path) if x.endswith('.mat')][0] + output_dict = sio.loadmat(path + file) + + # pull out important stuff + output = output_dict['output'] + accuracy = output[:,6] + rt = output[:,7] + validity = output[:,3] + + # median rt on each condition + median_rt[isub,sesh,:] = [ np.nanmedian(rt[ (validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]), + np.nanmedian(rt[ (validity 
== 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) ] + + # proportion accurate (number accurate / count) + prop_accu[isub,sesh,:] = [ np.sum(accuracy[(validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) / + np.sum((validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)), + np.sum(accuracy[(validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) / + np.sum((validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)) ] + + +################################################################################################### +# Average over sessions and print data + +# Summary stats and collapse sessions +Out_median_RT = np.squeeze(np.nanmean(median_rt,axis=1)) +Out_prop_accu = np.squeeze(np.nanmean(prop_accu,axis=1)) + +print('Median RT') +print(Out_median_RT) +print('Proportion Accurate') +print(Out_prop_accu) + + +################################################################################################### +# Plot barplot of results + +# bar plot results +plt.figure() +# Accuracy +ax = plt.subplot(211) +plt.bar([0,1], np.nanmean(Out_prop_accu,axis=0), 0.6, yerr = np.nanstd(Out_prop_accu,axis=0)/np.sqrt(n_subs)) +plt.ylim(.9,.96) +plt.title('Accuracy') +plt.ylabel('Proportion Correct') +ax.set_xticks([0,1]) +ax.set_xticklabels(conditions) +# RT +ax = plt.subplot(212) +plt.bar([0,1], np.nanmean(Out_median_RT,axis=0), 0.6, yerr = np.nanstd(Out_median_RT,axis=0)/np.sqrt(n_subs)) +plt.ylim(450,600) +plt.title('Reaction Time') +plt.ylabel('RT (ms)') +plt.xlabel('Condition') +ax.set_xticks([0,1]) +ax.set_xticklabels(conditions) +plt.show() + +################################################################################################### +# Output spreadsheet + +## CSV output +column_dict = {'Participant':subs, + 'AccValid':Out_prop_accu[:,0], + 'AccInvalid':Out_prop_accu[:,1], + 'RTValid':Out_median_RT[:,0], + 'RTInvalid':Out_median_RT[:,1] } +df = pd.DataFrame(column_dict) +print(df) +df.to_csv('375CueingBehPy.csv',index=False) + + diff --git 
a/develop/_downloads/fd5112e79635b428992c2be1eb2fdd3a/03r__cueing_behaviour_analysis_winter2019.ipynb b/develop/_downloads/fd5112e79635b428992c2be1eb2fdd3a/03r__cueing_behaviour_analysis_winter2019.ipynb new file mode 100644 index 00000000..5ef5f9fe --- /dev/null +++ b/develop/_downloads/fd5112e79635b428992c2be1eb2fdd3a/03r__cueing_behaviour_analysis_winter2019.ipynb @@ -0,0 +1,194 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Cueing Behavioural Analysis Winter 2019\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Standard Pythonic imports\nimport os,sys,glob,numpy as np,pandas as pd\nimport matplotlib.pyplot as plt \nimport scipy.io as sio \n\n# EEG-Notebooks imports\nfrom eegnb.datasets import datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Download the data\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')\ncueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')\n\n# If dataset hasn't been downloaded yet, download it\nif not os.path.isdir(cueing_data_path):\n datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Analyze .mat behavioural data for Accuracy and RT\n\nLoad in subjects\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# # Fall 2018\nsubs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112,\n 202, 203, 204, 205, 207, 208, 209, 210, 211, \n 301, 302, 303, 304, 305, 306, 307, 308, 
309]\n\n# 105 - no trials in one condition\n\n# # Winter 2019\n# subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,\n# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,\n# 1301, 1302, 1313, \n# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]\n\n# # 1107 - no csv session 1\n# # 1201 - no csv session 1\n# # 1304 - Muse 2\n# # 1308 - Muse 2\n# # 1311 - Muse 2\n# # 1314 - Muse 2\n# # 1407 - only session1\n\n# Both \n\n# Fall 2018\n# subs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112,\n# 202, 203, 204, 205, 207, 208, 209, 210, 211, \n# 301, 302, 303, 304, 305, 306, 307, 308, 309,\n# 1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,\n# 1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,\n# 1301, 1302, 1313, \n# 1401, 1402, 1403, 1404, 1405, 1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Set some settings\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# basic numbers\nn_subs = len(subs)\nn_sesh = 2\nconditions = ['valid','invalid']\nn_cond = len(conditions)\n\n# cutoff trials that are too slow or fast\nrt_toofast = 250\nrt_tooslow = 1500\n\n#creates arrays to save output\ncount_rt = np.zeros((n_subs, n_sesh, n_cond))\nmedian_rt = np.zeros((n_subs, n_sesh, n_cond))\nprop_accu = np.zeros((n_subs, n_sesh, n_cond))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Single Subject example\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "#select single subject\nsub = subs[0]\nprint('Subject - ' + str(sub))\n\n#just one session \nsesh = 1\n\n#load file\n#path = './subject' + str(sub) + '/session' + str(sesh) + '/'\npath = cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/'\nfile = [x 
for x in os.listdir(path) if x.endswith('.mat')][0]\noutput_dict = sio.loadmat(path + file)\nprint(path + file)\n\n#pull out important info\noutput = output_dict['output']\naccuracy = output[:,6]\nrt = output[:,7]\nvalidity = output[:,3]\nprint(accuracy,rt,validity)\n\n# median rt on each condition\t\nprint('')\nprint(rt)\nprint(rt[validity == 0])\nprint(rt[(validity == 0) & (rt <= rt_tooslow)])\n\nvalidRT \t= np.nanmedian(rt[ (validity == 1) &\n (rt >= rt_toofast) &\n (rt <= rt_tooslow)])\n\nprint('Valid RT = ' + str(validRT) + ' ms')\n\nInvalidRT = np.nanmedian(rt[ (validity == 0) &\n (rt >= rt_toofast) &\n (rt <= rt_tooslow)]) \n\nprint('Invalid RT = ' + str(InvalidRT) + ' ms')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Loop through subjects\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "for isub, sub in enumerate(subs):\n print('Subject - ' + str(sub))\n for sesh in range(n_sesh):\n # get the path and file name and load data\n #path = './subject' + str(sub) + '/session' + str(sesh+1) + '/'\n path = cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/'\n \n file = [x for x in os.listdir(path) if x.endswith('.mat')][0]\n output_dict = sio.loadmat(path + file)\n\n # pull out important stuff\n output = output_dict['output']\n accuracy = output[:,6]\n rt = output[:,7]\n validity = output[:,3]\n \n # median rt on each condition\t\n median_rt[isub,sesh,:] \t= [ np.nanmedian(rt[ (validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]),\n np.nanmedian(rt[ (validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) ]\n \n # proportion accurate (number accurate / count)\n prop_accu[isub,sesh,:] = [ np.sum(accuracy[(validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) / \n np.sum((validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)),\n np.sum(accuracy[(validity == 0) & (rt >= 
rt_toofast) & (rt <= rt_tooslow)]) /\n np.sum((validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)) ]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Average over sessions and print data\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Summary stats and collapse sessions\nOut_median_RT = np.squeeze(np.nanmean(median_rt,axis=1))\nOut_prop_accu = np.squeeze(np.nanmean(prop_accu,axis=1))\n\nprint('Median RT')\nprint(Out_median_RT)\nprint('Proportion Accurate')\nprint(Out_prop_accu)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot barplot of results\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# bar plot results\nplt.figure()\n# Accuracy\nax = plt.subplot(211)\nplt.bar([0,1], np.nanmean(Out_prop_accu,axis=0), 0.6, yerr = np.nanstd(Out_prop_accu,axis=0)/np.sqrt(n_subs))\nplt.ylim(.9,.96)\nplt.title('Accuracy')\nplt.ylabel('Proportion Correct')\nax.set_xticks([0,1])\nax.set_xticklabels(conditions)\n# RT\nax = plt.subplot(212)\nplt.bar([0,1], np.nanmean(Out_median_RT,axis=0), 0.6, yerr = np.nanstd(Out_median_RT,axis=0)/np.sqrt(n_subs))\nplt.ylim(450,600)\nplt.title('Reaction Time')\nplt.ylabel('RT (ms)')\nplt.xlabel('Condition')\nax.set_xticks([0,1])\nax.set_xticklabels(conditions)\nplt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Output spreadsheet\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "## CSV output\ncolumn_dict = {'Participant':subs,\n 'AccValid':Out_prop_accu[:,0],\n 'AccInvalid':Out_prop_accu[:,1],\n 'RTValid':Out_median_RT[:,0],\n 'RTInvalid':Out_median_RT[:,1] }\ndf = pd.DataFrame(column_dict)\nprint(df)\ndf.to_csv('375CueingBehPy.csv',index=False)" + ] + } + ], + 
"metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/develop/_images/attaching_electrode.png b/develop/_images/attaching_electrode.png new file mode 100644 index 00000000..9b92ee63 Binary files /dev/null and b/develop/_images/attaching_electrode.png differ diff --git a/develop/_images/bluemuse.PNG b/develop/_images/bluemuse.PNG new file mode 100644 index 00000000..fc803c3b Binary files /dev/null and b/develop/_images/bluemuse.PNG differ diff --git a/develop/_images/brainbit.png b/develop/_images/brainbit.png new file mode 100644 index 00000000..fe58fcd1 Binary files /dev/null and b/develop/_images/brainbit.png differ diff --git a/develop/_images/cyton.png b/develop/_images/cyton.png new file mode 100644 index 00000000..b506db14 Binary files /dev/null and b/develop/_images/cyton.png differ diff --git a/develop/_images/cyton_daisy.png b/develop/_images/cyton_daisy.png new file mode 100644 index 00000000..f8e1a415 Binary files /dev/null and b/develop/_images/cyton_daisy.png differ diff --git a/develop/_images/extra_electrode.png b/develop/_images/extra_electrode.png new file mode 100644 index 00000000..4904f388 Binary files /dev/null and b/develop/_images/extra_electrode.png differ diff --git a/develop/_images/ganglion.png b/develop/_images/ganglion.png new file mode 100644 index 00000000..d36e7938 Binary files /dev/null and b/develop/_images/ganglion.png differ diff --git a/develop/_images/gtec-unicorn.jpg b/develop/_images/gtec-unicorn.jpg new file mode 100644 index 00000000..bc7cb254 Binary files /dev/null and b/develop/_images/gtec-unicorn.jpg differ diff --git 
a/develop/_images/notion.png b/develop/_images/notion.png new file mode 100644 index 00000000..c53da699 Binary files /dev/null and b/develop/_images/notion.png differ diff --git a/develop/_images/sphx_glr_00x__n170_run_experiment_thumb.png b/develop/_images/sphx_glr_00x__n170_run_experiment_thumb.png new file mode 100644 index 00000000..b8af0be6 Binary files /dev/null and b/develop/_images/sphx_glr_00x__n170_run_experiment_thumb.png differ diff --git a/develop/_images/sphx_glr_00x__p300_run_experiment_thumb.png b/develop/_images/sphx_glr_00x__p300_run_experiment_thumb.png new file mode 100644 index 00000000..b8af0be6 Binary files /dev/null and b/develop/_images/sphx_glr_00x__p300_run_experiment_thumb.png differ diff --git a/develop/_images/sphx_glr_00x__ssvep_run_experiment_thumb.png b/develop/_images/sphx_glr_00x__ssvep_run_experiment_thumb.png new file mode 100644 index 00000000..b8af0be6 Binary files /dev/null and b/develop/_images/sphx_glr_00x__ssvep_run_experiment_thumb.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_001.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_001.png new file mode 100644 index 00000000..08934501 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_001.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_002.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_002.png new file mode 100644 index 00000000..b8e963d5 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_002.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_003.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_003.png new file mode 100644 index 00000000..d71e8eb1 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_003.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_004.png 
b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_004.png new file mode 100644 index 00000000..51cc8b5c Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_004.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_005.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_005.png new file mode 100644 index 00000000..5b494b1e Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_005.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_006.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_006.png new file mode 100644 index 00000000..0946c737 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_006.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_007.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_007.png new file mode 100644 index 00000000..370d913a Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_007.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_008.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_008.png new file mode 100644 index 00000000..307cd99b Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_008.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_009.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_009.png new file mode 100644 index 00000000..ec1c6869 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_009.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_010.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_010.png new file mode 100644 index 00000000..4e3bfc6d Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_010.png differ diff --git 
a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_011.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_011.png new file mode 100644 index 00000000..c2f09048 Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_011.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_012.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_012.png new file mode 100644 index 00000000..6fad0b6c Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_012.png differ diff --git a/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_thumb.png b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_thumb.png new file mode 100644 index 00000000..9bc1456f Binary files /dev/null and b/develop/_images/sphx_glr_01r__cueing_singlesub_analysis_thumb.png differ diff --git a/develop/_images/sphx_glr_01r__n170_viz_001.png b/develop/_images/sphx_glr_01r__n170_viz_001.png new file mode 100644 index 00000000..4e9fa465 Binary files /dev/null and b/develop/_images/sphx_glr_01r__n170_viz_001.png differ diff --git a/develop/_images/sphx_glr_01r__n170_viz_002.png b/develop/_images/sphx_glr_01r__n170_viz_002.png new file mode 100644 index 00000000..255d6c76 Binary files /dev/null and b/develop/_images/sphx_glr_01r__n170_viz_002.png differ diff --git a/develop/_images/sphx_glr_01r__n170_viz_003.png b/develop/_images/sphx_glr_01r__n170_viz_003.png new file mode 100644 index 00000000..9585e281 Binary files /dev/null and b/develop/_images/sphx_glr_01r__n170_viz_003.png differ diff --git a/develop/_images/sphx_glr_01r__n170_viz_thumb.png b/develop/_images/sphx_glr_01r__n170_viz_thumb.png new file mode 100644 index 00000000..14a4011e Binary files /dev/null and b/develop/_images/sphx_glr_01r__n170_viz_thumb.png differ diff --git a/develop/_images/sphx_glr_01r__p300_viz_001.png b/develop/_images/sphx_glr_01r__p300_viz_001.png new file mode 100644 index 00000000..9c1587c9 Binary files /dev/null 
and b/develop/_images/sphx_glr_01r__p300_viz_001.png differ diff --git a/develop/_images/sphx_glr_01r__p300_viz_002.png b/develop/_images/sphx_glr_01r__p300_viz_002.png new file mode 100644 index 00000000..739d8a68 Binary files /dev/null and b/develop/_images/sphx_glr_01r__p300_viz_002.png differ diff --git a/develop/_images/sphx_glr_01r__p300_viz_003.png b/develop/_images/sphx_glr_01r__p300_viz_003.png new file mode 100644 index 00000000..aac72346 Binary files /dev/null and b/develop/_images/sphx_glr_01r__p300_viz_003.png differ diff --git a/develop/_images/sphx_glr_01r__p300_viz_thumb.png b/develop/_images/sphx_glr_01r__p300_viz_thumb.png new file mode 100644 index 00000000..cc0e3562 Binary files /dev/null and b/develop/_images/sphx_glr_01r__p300_viz_thumb.png differ diff --git a/develop/_images/sphx_glr_01r__ssvep_viz_001.png b/develop/_images/sphx_glr_01r__ssvep_viz_001.png new file mode 100644 index 00000000..bdf1adfc Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_001.png differ diff --git a/develop/_images/sphx_glr_01r__ssvep_viz_002.png b/develop/_images/sphx_glr_01r__ssvep_viz_002.png new file mode 100644 index 00000000..060656c6 Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_002.png differ diff --git a/develop/_images/sphx_glr_01r__ssvep_viz_003.png b/develop/_images/sphx_glr_01r__ssvep_viz_003.png new file mode 100644 index 00000000..b95509cd Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_003.png differ diff --git a/develop/_images/sphx_glr_01r__ssvep_viz_004.png b/develop/_images/sphx_glr_01r__ssvep_viz_004.png new file mode 100644 index 00000000..bfd69468 Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_004.png differ diff --git a/develop/_images/sphx_glr_01r__ssvep_viz_005.png b/develop/_images/sphx_glr_01r__ssvep_viz_005.png new file mode 100644 index 00000000..16ac15b6 Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_005.png differ diff --git 
a/develop/_images/sphx_glr_01r__ssvep_viz_thumb.png b/develop/_images/sphx_glr_01r__ssvep_viz_thumb.png new file mode 100644 index 00000000..cfb1d5d1 Binary files /dev/null and b/develop/_images/sphx_glr_01r__ssvep_viz_thumb.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_001.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_001.png new file mode 100644 index 00000000..e069231b Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_001.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_002.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_002.png new file mode 100644 index 00000000..62c9b02f Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_002.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_003.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_003.png new file mode 100644 index 00000000..b9d45fcf Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_003.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_004.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_004.png new file mode 100644 index 00000000..c7579ee5 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_004.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_005.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_005.png new file mode 100644 index 00000000..c9634b94 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_005.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_006.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_006.png new file mode 100644 index 00000000..a9abd967 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_006.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_007.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_007.png new file mode 100644 index 00000000..922c149c Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_007.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_008.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_008.png new file mode 100644 index 00000000..94486238 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_008.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_009.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_009.png new file mode 100644 index 00000000..ec5fa56a Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_009.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_010.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_010.png new file mode 100644 index 00000000..b18cb829 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_010.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_011.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_011.png new file mode 100644 index 00000000..b52088f1 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_011.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_012.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_012.png new file mode 100644 index 00000000..f97d246a Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_012.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_013.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_013.png new file mode 100644 index 00000000..84d5c802 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_013.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_014.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_014.png new file mode 100644 index 00000000..6e2fec0b Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_014.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_015.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_015.png new file mode 100644 index 00000000..617aee58 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_015.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_016.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_016.png new file mode 100644 index 00000000..8e315cb3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_016.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_017.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_017.png new file mode 100644 index 00000000..62396a70 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_017.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_018.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_018.png new file mode 100644 index 00000000..817951b2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_018.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_019.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_019.png new file mode 100644 index 00000000..8e315cb3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_019.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_020.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_020.png new file mode 100644 index 00000000..32684714 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_020.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_021.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_021.png new file mode 100644 index 00000000..817951b2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_021.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_022.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_022.png new file mode 100644 index 00000000..e28343f9 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_022.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_023.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_023.png new file mode 100644 index 00000000..616e1ebb Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_023.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_024.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_024.png new file mode 100644 index 00000000..56de885b Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_024.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_025.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_025.png new file mode 100644 index 00000000..932aecbe Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_025.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_026.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_026.png new file mode 100644 index 00000000..b30d27cb Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_026.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_027.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_027.png new file mode 100644 index 00000000..b22de7e7 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_027.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_028.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_028.png new file mode 100644 index 00000000..8e7b6c41 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_028.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_029.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_029.png new file mode 100644 index 00000000..c92cd33d Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_029.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_030.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_030.png new file mode 100644 index 00000000..eafabd00 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_030.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_031.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_031.png new file mode 100644 index 00000000..8e315cb3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_031.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_032.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_032.png new file mode 100644 index 00000000..d5124eb3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_032.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_033.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_033.png new file mode 100644 index 00000000..817951b2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_033.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_034.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_034.png new file mode 100644 index 00000000..b3c68faf Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_034.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_035.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_035.png new file mode 100644 index 00000000..3ead29e1 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_035.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_036.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_036.png new file mode 100644 index 00000000..556beda6 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_036.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_037.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_037.png new file mode 100644 index 00000000..8446a64e Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_037.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_038.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_038.png new file mode 100644 index 00000000..79eb965e Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_038.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_039.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_039.png new file mode 100644 index 00000000..438fd536 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_039.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_040.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_040.png new file mode 100644 index 00000000..1fb59ccc Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_040.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_041.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_041.png new file mode 100644 index 00000000..357c8926 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_041.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_042.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_042.png new file mode 100644 index 00000000..e2d73ab0 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_042.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_043.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_043.png new file mode 100644 index 00000000..c2800d05 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_043.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_044.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_044.png new file mode 100644 index 00000000..19a8cbfb Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_044.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_045.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_045.png new file mode 100644 index 00000000..c43539b5 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_045.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_046.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_046.png new file mode 100644 index 00000000..8ce1a584 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_046.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_047.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_047.png new file mode 100644 index 00000000..7cd2b134 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_047.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_048.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_048.png new file mode 100644 index 00000000..379f09ca Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_048.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_049.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_049.png new file mode 100644 index 00000000..66d2a1ae Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_049.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_050.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_050.png new file mode 100644 index 00000000..2e273b05 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_050.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_051.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_051.png new file mode 100644 index 00000000..62cfd6e3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_051.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_052.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_052.png new file mode 100644 index 00000000..0acc525f Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_052.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_053.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_053.png new file mode 100644 index 00000000..eb4163b2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_053.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_054.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_054.png new file mode 100644 index 00000000..4fcf255e Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_054.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_055.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_055.png new file mode 100644 index 00000000..d8626e87 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_055.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_056.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_056.png new file mode 100644 index 00000000..448c2dd8 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_056.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_057.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_057.png new file mode 100644 index 00000000..e45c0d06 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_057.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_058.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_058.png new file mode 100644 index 00000000..6db8f2f0 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_058.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_059.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_059.png new file mode 100644 index 00000000..bdbc05b0 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_059.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_060.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_060.png new file mode 100644 index 00000000..63d673ed Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_060.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_061.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_061.png new file mode 100644 index 00000000..b25fd129 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_061.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_062.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_062.png new file mode 100644 index 00000000..353e6aae Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_062.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_063.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_063.png new file mode 100644 index 00000000..66a7f677 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_063.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_064.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_064.png new file mode 100644 index 00000000..aff33b4d Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_064.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_065.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_065.png new file mode 100644 index 00000000..cf763aa3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_065.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_066.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_066.png new file mode 100644 index 00000000..16b0a1e8 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_066.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_067.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_067.png new file mode 100644 index 00000000..6733a6cd Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_067.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_068.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_068.png new file mode 100644 index 00000000..b9b8656f Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_068.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_069.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_069.png new file mode 100644 index 00000000..241675a4 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_069.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_070.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_070.png new file mode 100644 index 00000000..6b674809 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_070.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_071.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_071.png new file mode 100644 index 00000000..fa49b770 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_071.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_072.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_072.png new file mode 100644 index 00000000..469807e3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_072.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_073.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_073.png new file mode 100644 index 00000000..d2584651 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_073.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_074.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_074.png new file mode 100644 index 00000000..1a606beb Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_074.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_075.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_075.png new file mode 100644 index 00000000..fd56251c Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_075.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_076.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_076.png new file mode 100644 index 00000000..8e315cb3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_076.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_077.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_077.png new file mode 100644 index 00000000..e7196bd4 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_077.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_078.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_078.png new file mode 100644 index 00000000..817951b2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_078.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_079.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_079.png new file mode 100644 index 00000000..dd958962 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_079.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_080.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_080.png new file mode 100644 index 00000000..7dbad703 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_080.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_081.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_081.png new file mode 100644 index 00000000..4ac5cd02 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_081.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_082.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_082.png new file mode 100644 index 00000000..6401877e Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_082.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_083.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_083.png new file mode 100644 index 00000000..76baaa5f Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_083.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_084.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_084.png new file mode 100644 index 00000000..4ed89ee3 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_084.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_085.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_085.png new file mode 100644 index 00000000..2d0886fd Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_085.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_086.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_086.png new file mode 100644 index 00000000..c651cb94 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_086.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_087.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_087.png new file mode 100644 index 00000000..4add1f32 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_087.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_088.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_088.png new file mode 100644 index 00000000..c2153acc Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_088.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_089.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_089.png new file mode 100644 index 00000000..5dff10c5 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_089.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_090.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_090.png new file mode 100644 index 00000000..f387190e Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_090.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_091.png 
b/develop/_images/sphx_glr_02r__cueing_group_analysis_091.png new file mode 100644 index 00000000..86e963b7 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_091.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_092.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_092.png new file mode 100644 index 00000000..c73551d2 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_092.png differ diff --git a/develop/_images/sphx_glr_02r__cueing_group_analysis_thumb.png b/develop/_images/sphx_glr_02r__cueing_group_analysis_thumb.png new file mode 100644 index 00000000..6d109394 Binary files /dev/null and b/develop/_images/sphx_glr_02r__cueing_group_analysis_thumb.png differ diff --git a/develop/_images/sphx_glr_02r__n170_decoding_001.png b/develop/_images/sphx_glr_02r__n170_decoding_001.png new file mode 100644 index 00000000..29550d1c Binary files /dev/null and b/develop/_images/sphx_glr_02r__n170_decoding_001.png differ diff --git a/develop/_images/sphx_glr_02r__n170_decoding_thumb.png b/develop/_images/sphx_glr_02r__n170_decoding_thumb.png new file mode 100644 index 00000000..1bf35f39 Binary files /dev/null and b/develop/_images/sphx_glr_02r__n170_decoding_thumb.png differ diff --git a/develop/_images/sphx_glr_02r__p300_decoding_001.png b/develop/_images/sphx_glr_02r__p300_decoding_001.png new file mode 100644 index 00000000..4fabcc2c Binary files /dev/null and b/develop/_images/sphx_glr_02r__p300_decoding_001.png differ diff --git a/develop/_images/sphx_glr_02r__p300_decoding_thumb.png b/develop/_images/sphx_glr_02r__p300_decoding_thumb.png new file mode 100644 index 00000000..f22e598c Binary files /dev/null and b/develop/_images/sphx_glr_02r__p300_decoding_thumb.png differ diff --git a/develop/_images/sphx_glr_02r__ssvep_decoding_001.png b/develop/_images/sphx_glr_02r__ssvep_decoding_001.png new file mode 100644 index 00000000..63b5840d Binary files /dev/null and 
b/develop/_images/sphx_glr_02r__ssvep_decoding_001.png differ diff --git a/develop/_images/sphx_glr_02r__ssvep_decoding_thumb.png b/develop/_images/sphx_glr_02r__ssvep_decoding_thumb.png new file mode 100644 index 00000000..721e055c Binary files /dev/null and b/develop/_images/sphx_glr_02r__ssvep_decoding_thumb.png differ diff --git a/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_001.png b/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_001.png new file mode 100644 index 00000000..b9430de3 Binary files /dev/null and b/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_001.png differ diff --git a/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_thumb.png b/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_thumb.png new file mode 100644 index 00000000..bcd3b17e Binary files /dev/null and b/develop/_images/sphx_glr_03r__cueing_behaviour_analysis_winter2019_thumb.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_001.png b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_001.png new file mode 100644 index 00000000..e43f7c7c Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_001.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_002.png b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_002.png new file mode 100644 index 00000000..95e879f5 Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_002.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_003.png b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_003.png new file mode 100644 index 00000000..bb315746 Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_003.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_004.png 
b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_004.png new file mode 100644 index 00000000..be4d38da Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_004.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_005.png b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_005.png new file mode 100644 index 00000000..dfa69c36 Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_005.png differ diff --git a/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_thumb.png b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_thumb.png new file mode 100644 index 00000000..65347738 Binary files /dev/null and b/develop/_images/sphx_glr_04r__cueing_group_analysis_winter2019_thumb.png differ diff --git a/develop/_images/windows_default_directory.PNG b/develop/_images/windows_default_directory.PNG new file mode 100644 index 00000000..1fe2a13c Binary files /dev/null and b/develop/_images/windows_default_directory.PNG differ diff --git a/develop/_images/windows_usb_select.PNG b/develop/_images/windows_usb_select.PNG new file mode 100644 index 00000000..7282a097 Binary files /dev/null and b/develop/_images/windows_usb_select.PNG differ diff --git a/develop/_static/_sphinx_javascript_frameworks_compat.js b/develop/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/develop/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. 
+ * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. 
+ */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/develop/_static/basic.css b/develop/_static/basic.css new file mode 100644 index 00000000..cfc60b86 --- /dev/null +++ b/develop/_static/basic.css @@ -0,0 +1,921 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + 
+div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid 
#ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars 
-------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th 
{ + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; 
+} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig 
dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + 
overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/develop/_static/binder_badge_logo.svg b/develop/_static/binder_badge_logo.svg new file mode 100644 index 00000000..327f6b63 --- 
/dev/null +++ b/develop/_static/binder_badge_logo.svg @@ -0,0 +1 @@ + launchlaunchbinderbinder \ No newline at end of file diff --git a/develop/_static/broken_example.png b/develop/_static/broken_example.png new file mode 100644 index 00000000..4fea24e7 Binary files /dev/null and b/develop/_static/broken_example.png differ diff --git a/develop/_static/check-solid.svg b/develop/_static/check-solid.svg new file mode 100644 index 00000000..92fad4b5 --- /dev/null +++ b/develop/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/develop/_static/clipboard.min.js b/develop/_static/clipboard.min.js new file mode 100644 index 00000000..54b3c463 --- /dev/null +++ b/develop/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/develop/_static/copybutton.css b/develop/_static/copybutton.css new file mode 100644 index 00000000..f1916ec7 --- /dev/null +++ b/develop/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + 
color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *

Short

+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/develop/_static/copybutton.js b/develop/_static/copybutton.js new file mode 100644 index 00000000..d06852f6 --- /dev/null +++ b/develop/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 
'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. 
+var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = 
filterText(target, exclude); + return formatCopyText(text, '$ ', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/develop/_static/copybutton_funcs.js b/develop/_static/copybutton_funcs.js new file mode 100644 index 00000000..dbe1aaad --- /dev/null +++ b/develop/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/develop/_static/css/badge_only.css b/develop/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ b/develop/_static/css/badge_only.css @@ -0,0 +1 @@ 
+.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/develop/_static/css/fonts/Roboto-Slab-Bold.woff b/develop/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/develop/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/develop/_static/css/fonts/Roboto-Slab-Bold.woff2 b/develop/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/develop/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/develop/_static/css/fonts/Roboto-Slab-Regular.woff b/develop/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary 
files /dev/null and b/develop/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/develop/_static/css/fonts/Roboto-Slab-Regular.woff2 b/develop/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/develop/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/develop/_static/css/fonts/fontawesome-webfont.eot b/develop/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/develop/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/develop/_static/css/fonts/fontawesome-webfont.svg b/develop/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/develop/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/develop/_static/css/fonts/fontawesome-webfont.ttf b/develop/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/develop/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/develop/_static/css/fonts/fontawesome-webfont.woff b/develop/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/develop/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/develop/_static/css/fonts/fontawesome-webfont.woff2 b/develop/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/develop/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/develop/_static/css/fonts/lato-bold-italic.woff b/develop/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/develop/_static/css/fonts/lato-bold-italic.woff differ diff --git a/develop/_static/css/fonts/lato-bold-italic.woff2 b/develop/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/develop/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/develop/_static/css/fonts/lato-bold.woff b/develop/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/develop/_static/css/fonts/lato-bold.woff differ diff --git a/develop/_static/css/fonts/lato-bold.woff2 b/develop/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files 
/dev/null and b/develop/_static/css/fonts/lato-bold.woff2 differ diff --git a/develop/_static/css/fonts/lato-normal-italic.woff b/develop/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/develop/_static/css/fonts/lato-normal-italic.woff differ diff --git a/develop/_static/css/fonts/lato-normal-italic.woff2 b/develop/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/develop/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/develop/_static/css/fonts/lato-normal.woff b/develop/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/develop/_static/css/fonts/lato-normal.woff differ diff --git a/develop/_static/css/fonts/lato-normal.woff2 b/develop/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/develop/_static/css/fonts/lato-normal.woff2 differ diff --git a/develop/_static/css/theme.css b/develop/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/develop/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir 
a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content 
h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a 
button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content
:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before
{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magi
c:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-l
aptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:
""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-a
pple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-squa
re:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{conten
t:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-m
ars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons
:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{conten
t:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:befo
re{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption 
.headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a 
.rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn 
.headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav 
.fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn 
span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content .fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download 
.btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content 
.btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini 
.headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint 
.admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint 
.wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content 
.wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso 
.admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso 
.wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container 
li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 
.3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group .wy-form-halves input[type=url],.wy-control-group 
.wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input 
input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid 
#ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error 
input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) 
rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content 
table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) 
td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto 
Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content 
section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs 
li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover 
button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 
13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a 
img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 
3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions 
a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions 
.rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content 
.toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content 
pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso .last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section 
ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul 
li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 
.headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar 
ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content .footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto 
auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content 
dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote 
span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils 
td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) 
code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 
.rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content 
.sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/develop/_static/debug.js b/develop/_static/debug.js new file mode 100644 index 00000000..338b2413 --- /dev/null +++ b/develop/_static/debug.js @@ -0,0 +1,7 @@ +// Add debug actions to flyout menu + +$(function () { + $("[data-toggle='rst-debug-badge']").on("click", function () { + $("[data-toggle='rst-versions']").toggleClass("rst-badge"); + }); +}) diff --git 
a/develop/_static/doctools.js b/develop/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/develop/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + 
event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/develop/_static/documentation_options.js b/develop/_static/documentation_options.js new file mode 100644 index 00000000..dd6cb24b --- /dev/null +++ b/develop/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '0.0.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: false, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/develop/_static/file.png b/develop/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/develop/_static/file.png differ diff --git a/develop/_static/jquery.js b/develop/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/develop/_static/jquery.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new 
RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function 
fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return 
e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var 
t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," 
":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return 
e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var 
b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/develop/_static/js/html5shiv.min.js b/develop/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/develop/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return 
t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/develop/_static/js/theme.js b/develop/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/develop/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return 
n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var 
t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t + +launchlaunchlitelite \ No newline at end of file diff --git a/develop/_static/language_data.js 
b/develop/_static/language_data.js new file mode 100644 index 00000000..250f5665 --- /dev/null +++ b/develop/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if 
(re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/develop/_static/minus.png b/develop/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/develop/_static/minus.png differ diff --git a/develop/_static/no_image.png b/develop/_static/no_image.png new file mode 100644 index 00000000..8c2d48d5 Binary files /dev/null and b/develop/_static/no_image.png differ diff --git a/develop/_static/plus.png b/develop/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/develop/_static/plus.png differ diff --git a/develop/_static/pygments.css b/develop/_static/pygments.css new file mode 100644 index 00000000..0d49244e --- /dev/null +++ b/develop/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* 
Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ 
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: 
#007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/develop/_static/searchtools.js b/develop/_static/searchtools.js new file mode 100644 index 00000000..97d56a74 --- /dev/null +++ b/develop/_static/searchtools.js @@ -0,0 +1,566 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = docUrlRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = docUrlRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + 
Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the 
browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + 
titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + 
if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && !terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/develop/_static/sg_gallery-binder.css b/develop/_static/sg_gallery-binder.css new file mode 100644 index 00000000..420005d2 --- /dev/null +++ b/develop/_static/sg_gallery-binder.css @@ -0,0 +1,11 @@ +/* CSS for binder integration */ + +div.binder-badge { + margin: 1em auto; + vertical-align: middle; +} + +div.lite-badge { + margin: 1em auto; + vertical-align: middle; +} diff --git a/develop/_static/sg_gallery-dataframe.css b/develop/_static/sg_gallery-dataframe.css new file mode 100644 index 00000000..fac74c43 --- /dev/null +++ b/develop/_static/sg_gallery-dataframe.css @@ -0,0 +1,47 @@ +/* Pandas dataframe css */ +/* Taken from: https://github.com/spatialaudio/nbsphinx/blob/fb3ba670fc1ba5f54d4c487573dbc1b4ecf7e9ff/src/nbsphinx.py#L587-L619 */ +html[data-theme="light"] { + --sg-text-color: #000; + --sg-tr-odd-color: #f5f5f5; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); +} 
+html[data-theme="dark"] { + --sg-text-color: #fff; + --sg-tr-odd-color: #373737; + --sg-tr-hover-color: rgba(30, 81, 122, 0.2); +} + +table.dataframe { + border: none !important; + border-collapse: collapse; + border-spacing: 0; + border-color: transparent; + color: var(--sg-text-color); + font-size: 12px; + table-layout: fixed; + width: auto; +} +table.dataframe thead { + border-bottom: 1px solid var(--sg-text-color); + vertical-align: bottom; +} +table.dataframe tr, +table.dataframe th, +table.dataframe td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +table.dataframe th { + font-weight: bold; +} +table.dataframe tbody tr:nth-child(odd) { + background: var(--sg-tr-odd-color); +} +table.dataframe tbody tr:hover { + background: var(--sg-tr-hover-color); +} diff --git a/develop/_static/sg_gallery-rendered-html.css b/develop/_static/sg_gallery-rendered-html.css new file mode 100644 index 00000000..93dc2ffb --- /dev/null +++ b/develop/_static/sg_gallery-rendered-html.css @@ -0,0 +1,224 @@ +/* Adapted from notebook/static/style/style.min.css */ +html[data-theme="light"] { + --sg-text-color: #000; + --sg-background-color: #ffffff; + --sg-code-background-color: #eff0f1; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); + --sg-tr-odd-color: #f5f5f5; +} +html[data-theme="dark"] { + --sg-text-color: #fff; + --sg-background-color: #121212; + --sg-code-background-color: #2f2f30; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); + --sg-tr-odd-color: #1f1f1f; +} + +.rendered_html { + color: var(--sg-text-color); + /* any extras will just be numbers: */ +} +.rendered_html em { + font-style: italic; +} +.rendered_html strong { + font-weight: bold; +} +.rendered_html u { + text-decoration: underline; +} +.rendered_html :link { + text-decoration: underline; +} +.rendered_html :visited { + text-decoration: underline; +} +.rendered_html h1 { + font-size: 185.7%; + margin: 
1.08em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h2 { + font-size: 157.1%; + margin: 1.27em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h3 { + font-size: 128.6%; + margin: 1.55em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h4 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h5 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; + font-style: italic; +} +.rendered_html h6 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; + font-style: italic; +} +.rendered_html h1:first-child { + margin-top: 0.538em; +} +.rendered_html h2:first-child { + margin-top: 0.636em; +} +.rendered_html h3:first-child { + margin-top: 0.777em; +} +.rendered_html h4:first-child { + margin-top: 1em; +} +.rendered_html h5:first-child { + margin-top: 1em; +} +.rendered_html h6:first-child { + margin-top: 1em; +} +.rendered_html ul:not(.list-inline), +.rendered_html ol:not(.list-inline) { + padding-left: 2em; +} +.rendered_html ul { + list-style: disc; +} +.rendered_html ul ul { + list-style: square; + margin-top: 0; +} +.rendered_html ul ul ul { + list-style: circle; +} +.rendered_html ol { + list-style: decimal; +} +.rendered_html ol ol { + list-style: upper-alpha; + margin-top: 0; +} +.rendered_html ol ol ol { + list-style: lower-alpha; +} +.rendered_html ol ol ol ol { + list-style: lower-roman; +} +.rendered_html ol ol ol ol ol { + list-style: decimal; +} +.rendered_html * + ul { + margin-top: 1em; +} +.rendered_html * + ol { + margin-top: 1em; +} +.rendered_html hr { + color: var(--sg-text-color); + background-color: var(--sg-text-color); +} +.rendered_html pre { + margin: 1em 2em; + padding: 0px; + background-color: var(--sg-background-color); +} +.rendered_html code { + background-color: var(--sg-code-background-color); +} +.rendered_html p code { + padding: 1px 5px; +} +.rendered_html pre code { + 
background-color: var(--sg-background-color); +} +.rendered_html pre, +.rendered_html code { + border: 0; + color: var(--sg-text-color); + font-size: 100%; +} +.rendered_html blockquote { + margin: 1em 2em; +} +.rendered_html table { + margin-left: auto; + margin-right: auto; + border: none; + border-collapse: collapse; + border-spacing: 0; + color: var(--sg-text-color); + font-size: 12px; + table-layout: fixed; +} +.rendered_html thead { + border-bottom: 1px solid var(--sg-text-color); + vertical-align: bottom; +} +.rendered_html tr, +.rendered_html th, +.rendered_html td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +.rendered_html th { + font-weight: bold; +} +.rendered_html tbody tr:nth-child(odd) { + background: var(--sg-tr-odd-color); +} +.rendered_html tbody tr:hover { + color: var(--sg-text-color); + background: var(--sg-tr-hover-color); +} +.rendered_html * + table { + margin-top: 1em; +} +.rendered_html p { + text-align: left; +} +.rendered_html * + p { + margin-top: 1em; +} +.rendered_html img { + display: block; + margin-left: auto; + margin-right: auto; +} +.rendered_html * + img { + margin-top: 1em; +} +.rendered_html img, +.rendered_html svg { + max-width: 100%; + height: auto; +} +.rendered_html img.unconfined, +.rendered_html svg.unconfined { + max-width: none; +} +.rendered_html .alert { + margin-bottom: initial; +} +.rendered_html * + .alert { + margin-top: 1em; +} +[dir="rtl"] .rendered_html p { + text-align: right; +} diff --git a/develop/_static/sg_gallery.css b/develop/_static/sg_gallery.css new file mode 100644 index 00000000..72227837 --- /dev/null +++ b/develop/_static/sg_gallery.css @@ -0,0 +1,342 @@ +/* +Sphinx-Gallery has compatible CSS to fix default sphinx themes +Tested for Sphinx 1.3.1 for all themes: default, alabaster, sphinxdoc, +scrolls, agogo, traditional, nature, haiku, pyramid +Tested for Read the Docs theme 0.1.7 */ 
+ +/* Define light colors */ +:root, html[data-theme="light"], body[data-theme="light"]{ + --sg-tooltip-foreground: black; + --sg-tooltip-background: rgba(250, 250, 250, 0.9); + --sg-tooltip-border: #ccc transparent; + --sg-thumb-box-shadow-color: #6c757d40; + --sg-thumb-hover-border: #0069d9; + --sg-script-out: #888; + --sg-script-pre: #fafae2; + --sg-pytb-foreground: #000; + --sg-pytb-background: #ffe4e4; + --sg-pytb-border-color: #f66; + --sg-download-a-background-color: #ffc; + --sg-download-a-background-image: linear-gradient(to bottom, #ffc, #d5d57e); + --sg-download-a-border-color: 1px solid #c2c22d; + --sg-download-a-color: #000; + --sg-download-a-hover-background-color: #d5d57e; + --sg-download-a-hover-box-shadow-1: rgba(255, 255, 255, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(0, 0, 0, 0.25); +} +@media(prefers-color-scheme: light) { + :root[data-theme="auto"], html[data-theme="auto"], body[data-theme="auto"] { + --sg-tooltip-foreground: black; + --sg-tooltip-background: rgba(250, 250, 250, 0.9); + --sg-tooltip-border: #ccc transparent; + --sg-thumb-box-shadow-color: #6c757d40; + --sg-thumb-hover-border: #0069d9; + --sg-script-out: #888; + --sg-script-pre: #fafae2; + --sg-pytb-foreground: #000; + --sg-pytb-background: #ffe4e4; + --sg-pytb-border-color: #f66; + --sg-download-a-background-color: #ffc; + --sg-download-a-background-image: linear-gradient(to bottom, #ffc, #d5d57e); + --sg-download-a-border-color: 1px solid #c2c22d; + --sg-download-a-color: #000; + --sg-download-a-hover-background-color: #d5d57e; + --sg-download-a-hover-box-shadow-1: rgba(255, 255, 255, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(0, 0, 0, 0.25); + } +} + +html[data-theme="dark"], body[data-theme="dark"] { + --sg-tooltip-foreground: white; + --sg-tooltip-background: rgba(10, 10, 10, 0.9); + --sg-tooltip-border: #333 transparent; + --sg-thumb-box-shadow-color: #79848d40; + --sg-thumb-hover-border: #003975; + --sg-script-out: rgb(179, 179, 179); + --sg-script-pre: 
#2e2e22; + --sg-pytb-foreground: #fff; + --sg-pytb-background: #1b1717; + --sg-pytb-border-color: #622; + --sg-download-a-background-color: #443; + --sg-download-a-background-image: linear-gradient(to bottom, #443, #221); + --sg-download-a-border-color: 1px solid #3a3a0d; + --sg-download-a-color: #fff; + --sg-download-a-hover-background-color: #616135; + --sg-download-a-hover-box-shadow-1: rgba(0, 0, 0, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(255, 255, 255, 0.25); +} +@media(prefers-color-scheme: dark){ + html[data-theme="auto"], body[data-theme="auto"] { + --sg-tooltip-foreground: white; + --sg-tooltip-background: rgba(10, 10, 10, 0.9); + --sg-tooltip-border: #333 transparent; + --sg-thumb-box-shadow-color: #79848d40; + --sg-thumb-hover-border: #003975; + --sg-script-out: rgb(179, 179, 179); + --sg-script-pre: #2e2e22; + --sg-pytb-foreground: #fff; + --sg-pytb-background: #1b1717; + --sg-pytb-border-color: #622; + --sg-download-a-background-color: #443; + --sg-download-a-background-image: linear-gradient(to bottom, #443, #221); + --sg-download-a-border-color: 1px solid #3a3a0d; + --sg-download-a-color: #fff; + --sg-download-a-hover-background-color: #616135; + --sg-download-a-hover-box-shadow-1: rgba(0, 0, 0, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(255, 255, 255, 0.25); + } +} + +.sphx-glr-thumbnails { + width: 100%; + margin: 0px 0px 20px 0px; + + /* align thumbnails on a grid */ + justify-content: space-between; + display: grid; + /* each grid column should be at least 160px (this will determine + the actual number of columns) and then take as much of the + remaining width as possible */ + grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); + gap: 15px; +} +.sphx-glr-thumbnails .toctree-wrapper { + /* hide empty toctree divs added to the DOM + by sphinx even though the toctree is hidden + (they would fill grid places with empty divs) */ + display: none; +} +.sphx-glr-thumbcontainer { + background: transparent; + -moz-border-radius: 
5px; + -webkit-border-radius: 5px; + border-radius: 5px; + box-shadow: 0 0 10px var(--sg-thumb-box-shadow-color); + + /* useful to absolutely position link in div */ + position: relative; + + /* thumbnail width should include padding and borders + and take all available space */ + box-sizing: border-box; + width: 100%; + padding: 10px; + border: 1px solid transparent; + + /* align content in thumbnail */ + display: flex; + flex-direction: column; + align-items: center; + gap: 7px; +} +.sphx-glr-thumbcontainer p { + position: absolute; + top: 0; + left: 0; +} +.sphx-glr-thumbcontainer p, +.sphx-glr-thumbcontainer p a { + /* link should cover the whole thumbnail div */ + width: 100%; + height: 100%; +} +.sphx-glr-thumbcontainer p a span { + /* text within link should be masked + (we are just interested in the href) */ + display: none; +} +.sphx-glr-thumbcontainer:hover { + border: 1px solid; + border-color: var(--sg-thumb-hover-border); + cursor: pointer; +} +.sphx-glr-thumbcontainer a.internal { + bottom: 0; + display: block; + left: 0; + box-sizing: border-box; + padding: 150px 10px 0; + position: absolute; + right: 0; + top: 0; +} +/* Next one is to avoid Sphinx traditional theme to cover all the +thumbnail with its default link Background color */ +.sphx-glr-thumbcontainer a.internal:hover { + background-color: transparent; +} + +.sphx-glr-thumbcontainer p { + margin: 0 0 0.1em 0; +} +.sphx-glr-thumbcontainer .figure { + margin: 10px; + width: 160px; +} +.sphx-glr-thumbcontainer img { + display: inline; + max-height: 112px; + max-width: 160px; +} +.sphx-glr-thumbcontainer[tooltip]:hover:after { + background: var(--sg-tooltip-background); + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + color: var(--sg-tooltip-foreground); + content: attr(tooltip); + padding: 10px; + z-index: 98; + width: 100%; + height: 100%; + position: absolute; + pointer-events: none; + top: 0; + box-sizing: border-box; + overflow: hidden; + backdrop-filter: 
blur(3px); +} + +.sphx-glr-script-out { + color: var(--sg-script-out); + display: flex; + gap: 0.5em; +} +.sphx-glr-script-out::before { + content: "Out:"; + /* These numbers come from the pre style in the pydata sphinx theme. This + * turns out to match perfectly on the rtd theme, but be a bit too low for + * the pydata sphinx theme. As I could not find a dimension to use that was + * scaled the same way, I just picked one option that worked pretty close for + * both. */ + line-height: 1.4; + padding-top: 10px; +} +.sphx-glr-script-out .highlight { + background-color: transparent; + /* These options make the div expand... */ + flex-grow: 1; + /* ... but also keep it from overflowing its flex container. */ + overflow: auto; +} +.sphx-glr-script-out .highlight pre { + background-color: var(--sg-script-pre); + border: 0; + max-height: 30em; + overflow: auto; + padding-left: 1ex; + /* This margin is necessary in the pydata sphinx theme because pre has a box + * shadow which would be clipped by the overflow:auto in the parent div + * above. 
*/ + margin: 2px; + word-break: break-word; +} +.sphx-glr-script-out + p { + margin-top: 1.8em; +} +blockquote.sphx-glr-script-out { + margin-left: 0pt; +} +.sphx-glr-script-out.highlight-pytb .highlight pre { + color: var(--sg-pytb-foreground); + background-color: var(--sg-pytb-background); + border: 1px solid var(--sg-pytb-border-color); + margin-top: 10px; + padding: 7px; +} + +div.sphx-glr-footer { + text-align: center; +} + +div.sphx-glr-download { + margin: 1em auto; + vertical-align: middle; +} + +div.sphx-glr-download a { + background-color: var(--sg-download-a-background-color); + background-image: var(--sg-download-a-background-image); + border-radius: 4px; + border: 1px solid var(--sg-download-a-border-color); + color: var(--sg-download-a-color); + display: inline-block; + font-weight: bold; + padding: 1ex; + text-align: center; +} + +div.sphx-glr-download code.download { + display: inline-block; + white-space: normal; + word-break: normal; + overflow-wrap: break-word; + /* border and background are given by the enclosing 'a' */ + border: none; + background: none; +} + +div.sphx-glr-download a:hover { + box-shadow: inset 0 1px 0 var(--sg-download-a-hover-box-shadow-1), 0 1px 5px var(--sg-download-a-hover-box-shadow-2); + text-decoration: none; + background-image: none; + background-color: var(--sg-download-a-hover-background-color); +} + +.sphx-glr-example-title:target::before { + display: block; + content: ""; + margin-top: -50px; + height: 50px; + visibility: hidden; +} + +ul.sphx-glr-horizontal { + list-style: none; + padding: 0; +} +ul.sphx-glr-horizontal li { + display: inline; +} +ul.sphx-glr-horizontal img { + height: auto !important; +} + +.sphx-glr-single-img { + margin: auto; + display: block; + max-width: 100%; +} + +.sphx-glr-multi-img { + max-width: 42%; + height: auto; +} + +div.sphx-glr-animation { + margin: auto; + display: block; + max-width: 100%; +} +div.sphx-glr-animation .animation { + display: block; +} + +p.sphx-glr-signature 
a.reference.external { + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + padding: 3px; + font-size: 75%; + text-align: right; + margin-left: auto; + display: table; +} + +.sphx-glr-clear { + clear: both; +} + +a.sphx-glr-backref-instance { + text-decoration: none; +} diff --git a/develop/_static/sphinx_highlight.js b/develop/_static/sphinx_highlight.js new file mode 100644 index 00000000..aae669d7 --- /dev/null +++ b/develop/_static/sphinx_highlight.js @@ -0,0 +1,144 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. + */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + parent.insertBefore( + span, + parent.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: 
parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(SphinxHighlight.highlightSearchWords); +_ready(SphinxHighlight.initEscapeListener); diff --git a/develop/_static/tabs.css b/develop/_static/tabs.css new file mode 100644 index 00000000..957ba60d --- /dev/null +++ b/develop/_static/tabs.css @@ -0,0 +1,89 @@ +.sphinx-tabs { + margin-bottom: 1rem; +} + +[role="tablist"] { + border-bottom: 1px solid #a0b3bf; +} + +.sphinx-tabs-tab { + position: relative; + font-family: Lato,'Helvetica Neue',Arial,Helvetica,sans-serif; + color: #1D5C87; + line-height: 24px; + margin: 0; + font-size: 16px; + font-weight: 400; + background-color: rgba(255, 255, 255, 0); + border-radius: 5px 5px 0 0; + border: 0; + padding: 1rem 1.5rem; + margin-bottom: 0; +} + 
+.sphinx-tabs-tab[aria-selected="true"] { + font-weight: 700; + border: 1px solid #a0b3bf; + border-bottom: 1px solid white; + margin: -1px; + background-color: white; +} + +.sphinx-tabs-tab:focus { + z-index: 1; + outline-offset: 1px; +} + +.sphinx-tabs-panel { + position: relative; + padding: 1rem; + border: 1px solid #a0b3bf; + margin: 0px -1px -1px -1px; + border-radius: 0 0 5px 5px; + border-top: 0; + background: white; +} + +.sphinx-tabs-panel.code-tab { + padding: 0.4rem; +} + +.sphinx-tab img { + margin-bottom: 24 px; +} + +/* Dark theme preference styling */ + +@media (prefers-color-scheme: dark) { + body[data-theme="auto"] .sphinx-tabs-panel { + color: white; + background-color: rgb(50, 50, 50); + } + + body[data-theme="auto"] .sphinx-tabs-tab { + color: white; + background-color: rgba(255, 255, 255, 0.05); + } + + body[data-theme="auto"] .sphinx-tabs-tab[aria-selected="true"] { + border-bottom: 1px solid rgb(50, 50, 50); + background-color: rgb(50, 50, 50); + } +} + +/* Explicit dark theme styling */ + +body[data-theme="dark"] .sphinx-tabs-panel { + color: white; + background-color: rgb(50, 50, 50); +} + +body[data-theme="dark"] .sphinx-tabs-tab { + color: white; + background-color: rgba(255, 255, 255, 0.05); +} + +body[data-theme="dark"] .sphinx-tabs-tab[aria-selected="true"] { + border-bottom: 2px solid rgb(50, 50, 50); + background-color: rgb(50, 50, 50); +} diff --git a/develop/_static/tabs.js b/develop/_static/tabs.js new file mode 100644 index 00000000..48dc303c --- /dev/null +++ b/develop/_static/tabs.js @@ -0,0 +1,145 @@ +try { + var session = window.sessionStorage || {}; +} catch (e) { + var session = {}; +} + +window.addEventListener("DOMContentLoaded", () => { + const allTabs = document.querySelectorAll('.sphinx-tabs-tab'); + const tabLists = document.querySelectorAll('[role="tablist"]'); + + allTabs.forEach(tab => { + tab.addEventListener("click", changeTabs); + }); + + tabLists.forEach(tabList => { + tabList.addEventListener("keydown", 
keyTabs); + }); + + // Restore group tab selection from session + const lastSelected = session.getItem('sphinx-tabs-last-selected'); + if (lastSelected != null) selectNamedTabs(lastSelected); +}); + +/** + * Key focus left and right between sibling elements using arrows + * @param {Node} e the element in focus when key was pressed + */ +function keyTabs(e) { + const tab = e.target; + let nextTab = null; + if (e.keyCode === 39 || e.keyCode === 37) { + tab.setAttribute("tabindex", -1); + // Move right + if (e.keyCode === 39) { + nextTab = tab.nextElementSibling; + if (nextTab === null) { + nextTab = tab.parentNode.firstElementChild; + } + // Move left + } else if (e.keyCode === 37) { + nextTab = tab.previousElementSibling; + if (nextTab === null) { + nextTab = tab.parentNode.lastElementChild; + } + } + } + + if (nextTab !== null) { + nextTab.setAttribute("tabindex", 0); + nextTab.focus(); + } +} + +/** + * Select or deselect clicked tab. If a group tab + * is selected, also select tab in other tabLists. + * @param {Node} e the element that was clicked + */ +function changeTabs(e) { + // Use this instead of the element that was clicked, in case it's a child + const notSelected = this.getAttribute("aria-selected") === "false"; + const positionBefore = this.parentNode.getBoundingClientRect().top; + const notClosable = !this.parentNode.classList.contains("closeable"); + + deselectTabList(this); + + if (notSelected || notClosable) { + selectTab(this); + const name = this.getAttribute("name"); + selectNamedTabs(name, this.id); + + if (this.classList.contains("group-tab")) { + // Persist during session + session.setItem('sphinx-tabs-last-selected', name); + } + } + + const positionAfter = this.parentNode.getBoundingClientRect().top; + const positionDelta = positionAfter - positionBefore; + // Scroll to offset content resizing + window.scrollTo(0, window.scrollY + positionDelta); +} + +/** + * Select tab and show associated panel. 
+ * @param {Node} tab tab to select + */ +function selectTab(tab) { + tab.setAttribute("aria-selected", true); + + // Show the associated panel + document + .getElementById(tab.getAttribute("aria-controls")) + .removeAttribute("hidden"); +} + +/** + * Hide the panels associated with all tabs within the + * tablist containing this tab. + * @param {Node} tab a tab within the tablist to deselect + */ +function deselectTabList(tab) { + const parent = tab.parentNode; + const grandparent = parent.parentNode; + + Array.from(parent.children) + .forEach(t => t.setAttribute("aria-selected", false)); + + Array.from(grandparent.children) + .slice(1) // Skip tablist + .forEach(panel => panel.setAttribute("hidden", true)); +} + +/** + * Select grouped tabs with the same name, but no the tab + * with the given id. + * @param {Node} name name of grouped tab to be selected + * @param {Node} clickedId id of clicked tab + */ +function selectNamedTabs(name, clickedId=null) { + const groupedTabs = document.querySelectorAll(`.sphinx-tabs-tab[name="${name}"]`); + const tabLists = Array.from(groupedTabs).map(tab => tab.parentNode); + + tabLists + .forEach(tabList => { + // Don't want to change the tabList containing the clicked tab + const clickedTab = tabList.querySelector(`[id="${clickedId}"]`); + if (clickedTab === null ) { + // Select first tab with matching name + const tab = tabList.querySelector(`.sphinx-tabs-tab[name="${name}"]`); + deselectTabList(tab); + selectTab(tab); + } + }) +} + +if (typeof exports === 'undefined') { + exports = {}; +} + +exports.keyTabs = keyTabs; +exports.changeTabs = changeTabs; +exports.selectTab = selectTab; +exports.deselectTabList = deselectTabList; +exports.selectNamedTabs = selectNamedTabs; diff --git a/develop/_static/theme_override.css b/develop/_static/theme_override.css new file mode 100644 index 00000000..81ff7b38 --- /dev/null +++ b/develop/_static/theme_override.css @@ -0,0 +1,35 @@ +.wy-nav-top { + background-color: #ff8400 !important; +} 
+.wy-side-nav-search { + background-color: #FF8C38 !important; +} + + +div[class^="highlight"] a { + background-color: #E6E6E6; +} + +div[class^="highlight"] a:hover { + background-color: #ABECFC; +} + +.rst-versions { + position: relative; +} +.rst-versions.shift-up { + overflow-y: visible; +} + +a[class^="sphx-glr-backref-module-"] { + text-decoration: none; + background-color: rgba(0, 0, 0, 0) !important; +} +a.sphx-glr-backref-module-sphinx_gallery { + text-decoration: underline; + background-color: #E6E6E6; +} + +.anim-state label { + display: inline-block; +} diff --git a/develop/auto_examples/index.html b/develop/auto_examples/index.html new file mode 100644 index 00000000..89077dc9 --- /dev/null +++ b/develop/auto_examples/index.html @@ -0,0 +1,544 @@ + + + + + + + All Notebook Examples — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

All Notebook Examples

+

This section has examples of different functionality available in the module.

+

Examples are organized by topic, and can be explored in any order.

+
+

Cueing Single Subject Analysis

+
Cueing Single Subject Analysis
+
+

Cueing Group Analysis

+
Cueing Group Analysis
+
+

Cueing Behavioural Analysis Winter 2019

+
Cueing Behavioural Analysis Winter 2019
+
+

Cueing Group Analysis Winter 2019

+
Cueing Group Analysis Winter 2019
+
+

N170 run experiment

+
N170 run experiment
+
+

N170 Load and Visualize Data

+
N170 Load and Visualize Data
+
+

N170 Decoding

+
N170 Decoding
+
+

P300 run experiment

+
P300 run experiment
+
+

P300 Load and Visualize Data

+
P300 Load and Visualize Data
+
+

P300 Decoding

+
P300 Decoding
+
+

SSVEP run experiment

+
SSVEP run experiment
+
+

SSVEP Visualization

+
SSVEP Visualization
+
+

SSVEP Decoding

+
SSVEP Decoding
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/sg_execution_times.html b/develop/auto_examples/sg_execution_times.html new file mode 100644 index 00000000..7b38e01f --- /dev/null +++ b/develop/auto_examples/sg_execution_times.html @@ -0,0 +1,520 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

00:00.000 total execution time for 0 files from auto_examples:

+
+ + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

N/A

N/A

N/A

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/01r__cueing_singlesub_analysis.html b/develop/auto_examples/visual_cueing/01r__cueing_singlesub_analysis.html new file mode 100644 index 00000000..c2f09fe5 --- /dev/null +++ b/develop/auto_examples/visual_cueing/01r__cueing_singlesub_analysis.html @@ -0,0 +1,860 @@ + + + + + + + Cueing Single Subject Analysis — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Cueing Single Subject Analysis

+
+

Setup

+
# Some standard pythonic imports
+import os,numpy as np#,sys,glob,pandas as pd
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+from matplotlib import pyplot as plt
+import matplotlib.patches as patches
+
+# MNE functions
+from mne import Epochs,find_events#, concatenate_raws
+from mne.time_frequency import tfr_morlet
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data,plot_conditions
+from eegnb.datasets import fetch_dataset
+
+
+
+
+

Load Data

+

We will use the eeg-expy visual cueing example dataset

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(cueing_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev');
+
+
+sub = 302
+sess = 1
+raw = load_data(sub,1, # subject, session
+                experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',
+                data_dir = eegnb_data_path)
+
+raw.append(
+      load_data(sub,2, # subject, session
+                experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',
+                data_dir = eegnb_data_path))
+
+
+
Downloading...
+From (original): https://drive.google.com/uc?id=1ABOVJ9S0BeJOsqdGFnexaTFZ-ZcsIXfQ
+From (redirected): https://drive.google.com/uc?id=1ABOVJ9S0BeJOsqdGFnexaTFZ-ZcsIXfQ&confirm=t&uuid=015d1c9b-6ead-4318-b5d9-333a0b7d7b30
+To: /home/runner/.eegnb/data/downloaded_data.zip
+
+  0%|          | 0.00/102M [00:00<?, ?B/s]
+  6%|▌         | 6.29M/102M [00:00<00:01, 62.1MB/s]
+ 12%|█▏        | 12.6M/102M [00:00<00:03, 27.5MB/s]
+ 17%|█▋        | 17.3M/102M [00:00<00:03, 26.1MB/s]
+ 25%|██▌       | 25.7M/102M [00:00<00:02, 31.2MB/s]
+ 34%|███▎      | 34.1M/102M [00:01<00:01, 36.8MB/s]
+ 42%|████▏     | 42.5M/102M [00:01<00:01, 39.8MB/s]
+ 50%|█████     | 50.9M/102M [00:01<00:01, 34.0MB/s]
+ 58%|█████▊    | 59.2M/102M [00:01<00:01, 40.7MB/s]
+ 67%|██████▋   | 67.6M/102M [00:01<00:00, 43.1MB/s]
+ 75%|███████▍  | 76.0M/102M [00:01<00:00, 46.1MB/s]
+ 88%|████████▊ | 89.1M/102M [00:02<00:00, 62.8MB/s]
+ 96%|█████████▌| 97.0M/102M [00:02<00:00, 53.2MB/s]
+100%|██████████| 102M/102M [00:02<00:00, 43.7MB/s]
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session001/subject302_session1_recording_2018-11-20-17.10.25.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session002/subject302_session2_recording_2018-11-20-17.18.04.csv
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session002/subject302_session2_recording_2018-11-20-17.31.03.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+
+
+

Visualize the power spectrum

+

Plot raw data

+
raw.plot();
+
+
+01r  cueing singlesub analysis
Using matplotlib as 2D backend.
+
+<MNEBrowseFigure size 800x800 with 4 Axes>
+
+
+
+
+

Power Spectral Density

+

One way to analyze the SSVEP is to plot the power spectral density, or PSD. SSVEPs should appear as peaks in power for certain frequencies. We expect clear peaks in the spectral domain at the stimulation frequencies of 30 and 20 Hz.

+
raw.compute_psd().plot();
+
+# Should see the electrical noise at 60 Hz, and maybe a peak at the red and blue channels between 7-14 Hz (Alpha)
+
+
+EEG
Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Filtering

+

Most ERP components are composed of lower frequency fluctuations in the EEG signal. Thus, we can filter out all frequencies between 1 and 30 hz in order to increase our ability to detect them.

+
raw.filter(1,30, method='iir');
+raw.compute_psd(fmin=1, fmax=30).plot();
+
+
+EEG
Filtering raw data in 3 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Epoching

+

Next, we will chunk (epoch) the data into segments representing the data 1000ms before to 2000ms after each cue, we will reject every epoch where the amplitude of the signal exceeded 100 uV, which should most eye blinks.

+
raw.filter(1,30, method='iir')
+events = find_events(raw)
+event_id = {'LeftCue': 1, 'RightCue': 2}
+
+rej_thresh_uV = 150
+rej_thresh = rej_thresh_uV*1e-6
+
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-1, tmax=2, baseline=(-1, 0),
+                reject={'eeg':rej_thresh}, preload=True,
+                verbose=False, picks=[0, 1, 2, 3])
+
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+conditions = OrderedDict()
+#conditions['LeftCue'] = [1]
+#conditions['RightCue'] = [2]
+conditions['LeftCue'] = ['LeftCue']
+conditions['RightCue'] = ['RightCue']
+diffwave = ('LeftCue', 'RightCue')
+
+fig, ax = plot_conditions(epochs, conditions=conditions,
+                                ci=97.5, n_boot=1000, title='',
+                                diff_waveform=diffwave, ylim=(-20,20))
+
+
+TP9, AF8, AF7, TP10
Filtering raw data in 3 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+213 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+sample drop %:  95.77464788732395
+
+
+
+
+

Spectrogram

+

We can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time

+
frequencies =  np.linspace(6, 30, 100, endpoint=True)
+
+wave_cycles = 6
+
+# Compute morlet wavelet
+
+# Left Cue
+tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies,
+                      n_cycles=wave_cycles, return_itc=True)
+tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+tfr.plot(picks=[0], mode='logratio',
+         title='TP9 - Ipsi');
+tfr.plot(picks=[1], mode='logratio',
+         title='TP10 - Contra');
+power_Ipsi_TP9 = tfr.data[0,:,:]
+power_Contra_TP10 = tfr.data[1,:,:]
+
+# Right Cue
+tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies,
+                      n_cycles=wave_cycles, return_itc=True)
+tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+tfr.plot(picks=[0], mode='logratio',
+         title='TP9 - Contra');
+tfr.plot(picks=[1], mode='logratio',
+         title='TP10 - Ipsi');
+power_Contra_TP9 = tfr.data[0,:,:]
+power_Ipsi_TP10 = tfr.data[1,:,:]
+
+
+
    +
  • TP9 - Ipsi
  • +
  • TP10 - Contra
  • +
  • TP9 - Contra
  • +
  • TP10 - Ipsi
  • +
+
Applying baseline correction (mode: mean)
+No baseline correction applied
+No baseline correction applied
+Applying baseline correction (mode: mean)
+No baseline correction applied
+No baseline correction applied
+
+
+
+
+

Now we compute and plot the differences

+

time frequency window for analysis

+
f_low = 7 # Hz
+f_high = 10
+f_diff = f_high-f_low
+
+t_low = 0 # s
+t_high = 1
+t_diff = t_high-t_low
+
+# Plot Differences
+times = epochs.times
+power_Avg_Ipsi =   (power_Ipsi_TP9+power_Ipsi_TP10)/2;
+power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;
+power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;
+
+# find max to make color range
+plot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), np.max(np.abs(power_Avg_Contra))])
+plot_diff_max = np.max(np.abs(power_Avg_Diff))
+
+# Ipsi
+fig, ax = plt.subplots(1)
+im = plt.imshow(power_Avg_Ipsi,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title('Power Average Ipsilateral to Cue')
+cb = fig.colorbar(im)
+cb.set_label('Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+
+#TP10
+fig, ax = plt.subplots(1)
+im = plt.imshow(power_Avg_Contra,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title(str(sub) + ' - Power Average Contra to Cue')
+cb = fig.colorbar(im)
+cb.set_label('Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+
+#difference between conditions
+fig, ax = plt.subplots(1)
+im = plt.imshow(power_Avg_Diff,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title('Power Difference Ipsi-Contra')
+cb = fig.colorbar(im)
+cb.set_label('Ipsi-Contra Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+
+
+# We expect greater alpha power ipsilateral to the cue direction (positive values) from 0 to 1.5 seconds
+
+
+
    +
  • Power Average Ipsilateral to Cue
  • +
  • 302 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
+
<matplotlib.patches.Rectangle object at 0x7fddc5f5dbe0>
+
+
+
+
+

Target Epoching

+

Next, we will chunk (epoch) the data into segments representing the data .200ms before to 1000ms after each target, we will reject every epoch where the amplitude of the signal exceeded ? uV, which should most eye blinks.

+
events = find_events(raw)
+event_id = {'InvalidTarget_Left': 11, 'InvalidTarget_Right': 12,
+           'ValidTarget_Left': 21,'ValidTarget_Right': 11}
+
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-.2, tmax=1, baseline=(-.2, 0),
+                reject={'eeg':.0001}, preload=True,
+                verbose=False, picks=[0, 1, 2, 3])
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+conditions = OrderedDict()
+conditions['ValidTarget'] = ['ValidTarget_Left', 'ValidTarget_Right']
+conditions['InvalidTarget'] = ['InvalidTarget_Left', 'InvalidTarget_Right']
+diffwave = ('ValidTarget', 'InvalidTarget')
+
+fig, ax = plot_conditions(epochs, conditions=conditions,
+                                ci=97.5, n_boot=1000, title='',
+                                diff_waveform=diffwave, ylim=(-20,20))
+
+
+TP9, AF8, AF7, TP10
213 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+sample drop %:  89.67136150234741
+
+
+

Total running time of the script: (0 minutes 10.934 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/02r__cueing_group_analysis.html b/develop/auto_examples/visual_cueing/02r__cueing_group_analysis.html new file mode 100644 index 00000000..342a2090 --- /dev/null +++ b/develop/auto_examples/visual_cueing/02r__cueing_group_analysis.html @@ -0,0 +1,2252 @@ + + + + + + + Cueing Group Analysis — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Cueing Group Analysis

+
+

Setup

+
# Standard pythonic importa
+import  os,sys,glob,numpy as np,pandas as pd
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+from matplotlib import pyplot as plt
+import matplotlib.patches as patches
+
+# MNE functions
+from mne import Epochs, find_events, concatenate_raws
+from mne.time_frequency import tfr_morlet
+
+# EEG-Notebooks functions
+from eegnb.datasets import datasets
+from eegnb.analysis.analysis_utils import load_data
+
+
+
+
+

Download the data

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(cueing_data_path):
+      datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')
+
+
+
+
+

Load data into MNE objects

+
# MNE is a very powerful Python library for analyzing EEG data. It provides helpful functions for performing key tasks such as filtering EEG data, rejecting artifacts, and grouping EEG data into chunks (epochs).
+
+# The first step after loading dependencies is use MNE to read the data we've collected into an MNE Raw object
+
+
+
subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,
+        202, 203, 204, 205, 207, 208, 209, 210, 211,
+        301, 302, 303, 304, 305, 306, 307, 308, 309]
+
+diff_out = []
+Ipsi_out = []
+Contra_out = []
+Ipsi_spectra_out = []
+Contra_spectra_out = []
+diff_spectra_out = []
+ERSP_diff_out = []
+ERSP_Ipsi_out = []
+ERSP_Contra_out = []
+
+frequencies =  np.linspace(6, 30, 100, endpoint=True)
+wave_cycles = 6
+
+# time frequency window for analysis
+f_low = 7 # Hz
+f_high = 10
+f_diff = f_high-f_low
+
+t_low = 0 # s
+t_high = 1
+t_diff = t_high-t_low
+
+bad_subs= [6, 7, 13, 26]
+really_bad_subs = [11, 12, 19]
+sub_count = 0
+
+
+
+for sub in subs:
+    print(sub)
+
+    sub_count += 1
+
+
+    if (sub_count in really_bad_subs):
+        rej_thresh_uV = 90
+    elif (sub_count in bad_subs):
+        rej_thresh_uV = 90
+    else:
+        rej_thresh_uV = 90
+
+    rej_thresh = rej_thresh_uV*1e-6
+
+
+    # Load both sessions
+    raw = load_data(sub,1, # subject, session
+                    experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',
+                    data_dir = eegnb_data_path)
+
+    raw.append(
+          load_data(sub,2, # subject, session
+                    experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',
+                    data_dir = eegnb_data_path))
+
+
+    # Filter Raw Data
+    raw.filter(1,30, method='iir')
+
+    #Select Events
+    events = find_events(raw)
+    event_id = {'LeftCue': 1, 'RightCue': 2}
+    epochs = Epochs(raw, events=events, event_id=event_id,
+                    tmin=-1, tmax=2, baseline=(-1, 0),
+                    reject={'eeg':rej_thresh}, preload=True,
+                    verbose=False, picks=[0, 3])
+    print('Trials Remaining: ' + str(len(epochs.events)) + '.')
+
+    # Compute morlet wavelet
+
+    # Left Cue
+    tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies,
+                          n_cycles=wave_cycles, return_itc=True)
+    tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+    #tfr.plot(picks=[0], mode='logratio',
+    #         title='TP9 - Ipsi');
+    #tfr.plot(picks=[3], mode='logratio',
+    #         title='TP10 - Contra');
+    power_Ipsi_TP9 = tfr.data[0,:,:]
+    power_Contra_TP10 = tfr.data[1,:,:]
+
+    # Right Cue
+    tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies,
+                          n_cycles=wave_cycles, return_itc=True)
+    tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+    #tfr.plot(picks=[0], mode='logratio',
+    #         title='TP9 - Contra');
+    #tfr.plot(picks=[3], mode='logratio',
+    #         title='TP10 - Ipsi');
+    power_Contra_TP9 = tfr.data[0,:,:]
+    power_Ipsi_TP10 = tfr.data[1,:,:]
+
+    # Plot Differences
+    #%matplotlib inline
+    times = epochs.times
+    power_Avg_Ipsi =   (power_Ipsi_TP9+power_Ipsi_TP10)/2;
+    power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;
+    power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;
+
+
+    #find max to make color range
+    plot_max = np.max([np.max(np.abs(power_Avg_Ipsi)), np.max(np.abs(power_Avg_Contra))])
+    plot_diff_max = np.max(np.abs(power_Avg_Diff))
+
+
+
+    #Ipsi
+    fig, ax = plt.subplots(1)
+    im = plt.imshow(power_Avg_Ipsi,
+               extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+               aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+    plt.xlabel('Time (sec)')
+    plt.ylabel('Frequency (Hz)')
+    plt.title('Power Average Ipsilateral to Cue')
+    cb = fig.colorbar(im)
+    cb.set_label('Power')
+    # Create a Rectangle patch
+    rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+    # Add the patch to the Axes
+    ax.add_patch(rect)
+
+    #TP10
+    fig, ax = plt.subplots(1)
+    im = plt.imshow(power_Avg_Contra,
+               extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+               aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+    plt.xlabel('Time (sec)')
+    plt.ylabel('Frequency (Hz)')
+    plt.title(str(sub) + ' - Power Average Contra to Cue')
+    cb = fig.colorbar(im)
+    cb.set_label('Power')
+    # Create a Rectangle patch
+    rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+    # Add the patch to the Axes
+    ax.add_patch(rect)
+
+    #difference between conditions
+    fig, ax = plt.subplots(1)
+    im = plt.imshow(power_Avg_Diff,
+               extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+               aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_diff_max, vmax=plot_diff_max)
+    plt.xlabel('Time (sec)')
+    plt.ylabel('Frequency (Hz)')
+    plt.title('Power Difference Ipsi-Contra')
+    cb = fig.colorbar(im)
+    cb.set_label('Ipsi-Contra Power')
+    # Create a Rectangle patch
+    rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+    # Add the patch to the Axes
+    ax.add_patch(rect)
+
+
+
+
+    #output data into array
+    Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low):
+                                           np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                           )
+                   )
+    Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low):
+                                                   np.argmax(times>t_high)-1 ],1
+                                   )
+                           )
+
+    Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low):
+                                               np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                             )
+                     )
+
+    Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low):
+                                                       np.argmax(times>t_high)-1 ],1))
+
+
+    diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low):
+                                           np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                           )
+                   )
+    diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low):
+                                                   np.argmax(times>t_high)-1 ],1
+                                   )
+                           )
+
+
+    ERSP_diff_out.append(power_Avg_Diff)
+    ERSP_Ipsi_out.append(power_Avg_Ipsi)
+    ERSP_Contra_out.append(power_Avg_Contra)
+
+
+
+print(np.shape(ERSP_diff_out))
+print(np.shape(Contra_spectra_out))
+
+print(diff_out)
+
+
+
    +
  • Power Average Ipsilateral to Cue
  • +
  • 101 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 102 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 103 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 104 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 105 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 106 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 108 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 109 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 110 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 111 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 112 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 202 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 203 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 204 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 205 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 207 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 208 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 209 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 210 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 211 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 301 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 302 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 303 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 304 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 305 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 306 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 307 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 308 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
  • Power Average Ipsilateral to Cue
  • +
  • 309 - Power Average Contra to Cue
  • +
  • Power Difference Ipsi-Contra
  • +
+
101
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0101/session001/subject101_session1_recording_2018-11-20-16.42.02.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0101/session002/subject101_session2_recording_2018-11-20-16.53.23.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+122 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 10.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+102
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0102/session001/subject102_session1_recording_2018-11-20-17.05.27.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0102/session002/subject102_session2_recording_2018-11-20-17.14.10.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+92 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 40.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+103
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0103/session001/subject103_session1_recording_2018-11-20-17.26.43.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0103/session002/subject103_session2_recording_2018-11-20-17.33.59.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+160 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 58.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+104
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0104/session001/subject104_session1_recording_2018-11-21-18.18.35.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0104/session002/subject104_session2_recording_2018-11-21-18.27.12.csv
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0104/session002/subject104_session2_recording_2018-11-20-17.49.47.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 3 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+272 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 39.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+105
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0105/session001/subject105_session1_recording_2018-11-20-18.17.07.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0105/session002/subject105_session2_recording_2018-11-20-18.23.37.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+111 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 21.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+106
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0106/session001/subject106_session1_recording_2018-11-20-18.00.27.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0106/session002/subject106_session2_recording_2018-11-20-18.07.03.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+201 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 2.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+108
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0108/session001/subject108_session1_recording_2018-11-22-17.07.36.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0108/session002/subject108_session2_recording_2018-11-22-17.15.34.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+130 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 0.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+109
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0109/session001/subject109_session1_recording_2018-11-22-17.27.44.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0109/session002/subject109_session2_recording_2018-11-22-17.34.20.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+160 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 5.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+110
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0110/session001/subject110_session1_recording_2018-11-22-16.46.55.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0110/session002/subject110_session2_recording_2018-11-22-16.56.10.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+107 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 28.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+111
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0111/session001/subject111_session1_recording_2018-11-22-17.43.50.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0111/session002/subject111_session2_recording_2018-11-22-17.49.42.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+223 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 32.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+112
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0112/session001/subject112_session1_recording_2018-11-22-17.59.31.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0112/session002/subject112_session2_recording_2018-11-22-18.07.30.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+86 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 0.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+202
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0202/session001/subject202_session1_recording_2018-11-20-16.39.02.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0202/session002/subject202_session2_recording_2018-11-20-16.46.28.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+156 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 61.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+203
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0203/session001/subject203_session1_recording_2018-11-20-16.59.53.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0203/session002/subject203_session2_recording_2018-11-20-17.08.00.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+120 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 4.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+204
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0204/session001/subject204_session1_recording_2018-11-20-17.22.49.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0204/session002/subject204_session2_recording_2018-11-20-17.29.51.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+183 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 49.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+205
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0205/session001/subject205_session1_recording_2018-11-20-17.41.48.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0205/session002/subject205_session2_recording_2018-11-20-17.48.11.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+194 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 87.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+207
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0207/session001/subject207_session1_recording_2018-11-22-16.33.38.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0207/session002/subject207_session2_recording_2018-11-22-16.41.04.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+181 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 70.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+208
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0208/session001/subject208_session1_recording_2018-11-22-16.54.46.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0208/session002/subject208_session2_recording_2018-11-22-17.02.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+181 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 70.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+209
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0209/session001/subject209_session1_recording_2018-11-22-17.21.18.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0209/session002/subject209_session2_recording_2018-11-22-17.29.01.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+118 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 5.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+210
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0210/session001/subject210_session1_recording_2018-11-22-17.42.48.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0210/session002/subject210_session2_recording_2018-11-22-17.49.59.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+170 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 67.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+211
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0211/session001/subject211_session1_recording_2018-11-20-18.01.25.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0211/session002/subject211_session2_recording_2018-11-20-18.11.42.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+96 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 9.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+301
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0301/session001/subject301_session1_recording_2018-11-20-16.52.26.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30564
+    Range : 0 ... 30563 =      0.000 ...   119.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0301/session002/subject301_session2_recording_2018-11-20-16.57.44.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30564
+    Range : 0 ... 30563 =      0.000 ...   119.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+54 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 26.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+302
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session001/subject302_session1_recording_2018-11-20-17.10.25.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session002/subject302_session2_recording_2018-11-20-17.18.04.csv
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0302/session002/subject302_session2_recording_2018-11-20-17.31.03.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 3 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+213 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 11.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+303
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0303/session001/subject303_session1_recording_2018-11-20-17.49.48.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0303/session002/subject303_session2_recording_2018-11-20-17.39.08.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+198 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 8.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+304
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0304/session001/subject304_session1_recording_2018-11-20-17.59.43.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0304/session002/subject304_session2_recording_2018-11-20-18.06.46.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+195 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 24.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+305
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0305/session001/subject305_session1_recording_2018-11-20-18.22.30.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0305/session002/subject305_session2_recording_2018-11-20-18.29.57.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+166 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 9.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+306
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0306/session001/subject306_session1_recording_2018-11-22-16.40.52.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0306/session002/subject306_session2_recording_2018-11-22-16.48.01.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+147 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 1.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+307
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0307/session001/subject307_session1_recording_2018-11-22-17.02.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0307/session002/subject307_session2_recording_2018-11-22-17.09.25.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+171 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 49.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+308
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0308/session001/subject308_session1_recording_2018-11-22-17.21.43.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0308/session002/subject308_session2_recording_2018-11-22-17.28.31.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+180 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 71.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+309
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0309/session001/subject309_session1_recording_2018-11-22-17.41.24.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0309/session002/subject309_session2_recording_2018-11-22-17.48.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+196 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 74.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+(29, 100, 769)
+(29, 100)
+[-1.8633016529356232e-10, -5.194492698041424e-11, 3.965717366845577e-11, -9.439937924298538e-11, 1.5395426956926177e-10, nan, nan, 1.437520530470476e-10, 6.156413801290107e-12, -3.4542289627374594e-11, nan, 1.404002269024156e-10, 4.5250575579728836e-11, 8.13341442171729e-11, 1.7562617842729167e-12, 3.498985471214225e-11, -7.358448888412755e-11, -2.066739867471836e-11, 5.833832231476192e-11, -1.1353507280272121e-10, 2.0578545669354385e-10, 8.13612907825663e-11, 2.231124565129559e-09, 4.417204016507064e-13, 1.5824220701839133e-10, nan, 2.972694444208325e-11, -2.7053453190038292e-11, 1.0032262566342049e-10]
+
+
+
+
+

Combine subjects

+
# ---------------------------------------------------------------------------
# Grand averages across subjects.  nanmean over axis 0 (subjects) so that
# subjects rejected earlier in the pipeline (all-NaN entries) are left out
# of the average instead of dragging it toward zero.
GrandAvg_diff = np.nanmean(ERSP_diff_out,0)
GrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0)
GrandAvg_Contra = np.nanmean(ERSP_Contra_out,0)

GrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0)
GrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0)
GrandAvg_spec_diff = np.nanmean(diff_spectra_out,0)

# Standard error of the mean, computed over the subjects that actually
# contributed data (non-NaN difference values).
num_good = len(diff_out) - sum(np.isnan(diff_out))
GrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good)
GrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good)
GrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good)

# Spectra with error bars, one trace per condition
fig, ax = plt.subplots(1)
plt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste)
plt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste)

plt.legend(('Ipsi','Contra'))
plt.xlabel('Frequency (Hz)')
plt.ylabel('Power (uV^2)')
plt.hlines(0,3,33)

# Ipsi-minus-Contra difference spectrum with error bars
fig, ax = plt.subplots(1)
plt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste)

# BUG FIX: legend() was given a bare string, which matplotlib iterates
# character-by-character ('I', 'p', 's', ...); wrap the label in a list.
plt.legend(['Ipsi-Contra'])
plt.xlabel('Frequency (Hz)')
plt.ylabel('Power (uV^2)')
plt.hlines(0,3,33)

# Time-frequency images: use one symmetric colour scale for both conditions
# so the two maps are directly comparable.
plot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))])

# Grand-average time-frequency power, Ipsi condition
fig, ax = plt.subplots(1)
im = plt.imshow(GrandAvg_Ipsi,
           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
plt.xlabel('Time (sec)')
plt.ylabel('Frequency (Hz)')
plt.title('Power Ipsi')
cb = fig.colorbar(im)
cb.set_label('Power')
# Outline the time/frequency window (t_low..t_low+t_diff, f_low..f_low+f_diff)
rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
ax.add_patch(rect)

# Grand-average time-frequency power, Contra condition
fig, ax = plt.subplots(1)
im = plt.imshow(GrandAvg_Contra,
           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
plt.xlabel('Time (sec)')
plt.ylabel('Frequency (Hz)')
plt.title('Power Contra')
cb = fig.colorbar(im)
cb.set_label('Power')
rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
ax.add_patch(rect)

# Grand-average Ipsi-minus-Contra difference (its own symmetric scale)
plot_max_diff = np.max(np.abs(GrandAvg_diff))
fig, ax = plt.subplots(1)
im = plt.imshow(GrandAvg_diff,
           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff)
plt.xlabel('Time (sec)')
plt.ylabel('Frequency (Hz)')
plt.title('Power Difference Ipsi-Contra')
cb = fig.colorbar(im)
cb.set_label('Ipsi-Contra Power')
rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
ax.add_patch(rect)
+
+
+
    +
  • 02r  cueing group analysis
  • +
  • 02r  cueing group analysis
  • +
  • Power Ipsi
  • +
  • Power Contra
  • +
  • Power Difference Ipsi-Contra
  • +
+
<matplotlib.patches.Rectangle object at 0x7fddc2640d30>
+
+
+
+
+

Compute t test

+
import scipy.stats
# Number of subjects that actually contributed data (non-NaN differences).
num_good = len(diff_out) - sum(np.isnan(diff_out))

# BUG FIX: the original called scipy.stats.ttest_ind() against a vector of
# zeros -- a two-sample test -- while reporting one-sample degrees of
# freedom (num_good - 1) below.  A one-sample t-test of the differences
# against 0 is the intended statistic and matches the reported df.
[tstat, pval] = scipy.stats.ttest_1samp(diff_out, 0, nan_policy='omit')
print('Ipsi Mean: '+  str(np.nanmean(Ipsi_out)))
print('Contra Mean: '+  str(np.nanmean(Contra_out)))
print('Mean Diff: '+  str(np.nanmean(diff_out)))
print('t(' + str(num_good-1) + ') = ' + str(round(tstat,3)))
print('p = ' + str(round(pval,3)))
+
+
+
Ipsi Mean: 9.02456136295644e-11
+Contra Mean: -2.617586361170848e-11
+Mean Diff: 1.164214772412729e-10
+t(24) = 1.394
+p = 0.169
+
+
+
+
+

Save average powers ipsi and contra

+
import pandas as pd
# Echo the per-subject Ipsi-minus-Contra values for inspection
# (NaN = subject rejected earlier in the pipeline).
print(diff_out)
# One row per subject, one column per attention condition.
raw_data = {'Ipsi Power': Ipsi_out,
        'Contra Power': Contra_out}
df = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power'])
df.to_csv('375CueingEEG.csv')
print('Saved subject averages for each condition to 375CueingEEG.csv file in present directory')
+
+
+
[-1.8633016529356232e-10, -5.194492698041424e-11, 3.965717366845577e-11, -9.439937924298538e-11, 1.5395426956926177e-10, nan, nan, 1.437520530470476e-10, 6.156413801290107e-12, -3.4542289627374594e-11, nan, 1.404002269024156e-10, 4.5250575579728836e-11, 8.13341442171729e-11, 1.7562617842729167e-12, 3.498985471214225e-11, -7.358448888412755e-11, -2.066739867471836e-11, 5.833832231476192e-11, -1.1353507280272121e-10, 2.0578545669354385e-10, 8.13612907825663e-11, 2.231124565129559e-09, 4.417204016507064e-13, 1.5824220701839133e-10, nan, 2.972694444208325e-11, -2.7053453190038292e-11, 1.0032262566342049e-10]
+Saved subject averages for each condition to 375CueingEEG.csv file in present directory
+
+
+
+
+

Save spectra

+
# Write the per-subject spectra (one row per subject, one column per
# frequency bin) to a separate CSV file for each condition.
df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies)
df.to_csv('375CueingIpsiSpec.csv')

df = pd.DataFrame(Contra_spectra_out,columns=frequencies)
df.to_csv('375CueingContraSpec.csv')
print('Saved Spectra to 375Cueing*Spec.csv file in present directory')
+
+
+
Saved Spectra to 375Cueing*Spec.csv file in present directory
+
+
+

Total running time of the script: (0 minutes 29.319 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.html b/develop/auto_examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.html new file mode 100644 index 00000000..5f5f6f14 --- /dev/null +++ b/develop/auto_examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.html @@ -0,0 +1,867 @@ + + + + + + + Cueing Behavioural Analysis Winter 2019 — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ + +
+

Cueing Behavioural Analysis Winter 2019

+
+

Setup

+
# Standard Pythonic imports
+import os,sys,glob,numpy as np,pandas as pd
+import matplotlib.pyplot as plt
+import scipy.io as sio
+
+# EEG-Notebooks imports
+from eegnb.datasets import datasets
+
+
+

Download the data

+
# Local eeg-notebooks data cache (~/.eegnb/data) and the folder that the
# visual-cueing dataset is expected to occupy inside it.
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')

# Fetch the dataset only on first run; skip the download if the folder exists.
if not os.path.isdir(cueing_data_path):
      datasets.fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')
+
+
+
+
+

Analyze .mat behavioural data for Accuracy and RT

+

Load in subjects

+
# Subject IDs included in the behavioural analysis.
# Active roster: the Fall 2018 cohort.  Subject 105 is excluded because
# one condition contained no trials.
subs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112,
        202, 203, 204, 205, 207, 208, 209, 210, 211,
        301, 302, 303, 304, 305, 306, 307, 308, 309]

# Alternative roster: the Winter 2019 cohort.  Excluded from it:
#   1107, 1201 - no csv for session 1
#   1304, 1308, 1311, 1314 - recorded with Muse 2
#   1407 - only session 1 present
# subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,
#         1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,
#         1301, 1302, 1313,
#         1401, 1402, 1403, 1404, 1405,  1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]

# Combined roster (both cohorts together):
# subs = [101, 102, 103, 104, 106, 108, 109, 110, 111, 112,
#         202, 203, 204, 205, 207, 208, 209, 210, 211,
#         301, 302, 303, 304, 305, 306, 307, 308, 309,
#         1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,
#         1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,
#         1301, 1302, 1313,
#         1401, 1402, 1403, 1404, 1405,  1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]
+
+
+

Set some settings

+
# Analysis dimensions: subjects x sessions x cue-validity conditions
n_subs = len(subs)
n_sesh = 2
conditions = ['valid','invalid']
n_cond = len(conditions)

# Reaction-time trimming window (drop anticipations and lapses)
rt_toofast = 250
rt_tooslow = 1500

# Pre-allocate the output arrays, all shaped (subject, session, condition)
out_shape = (n_subs, n_sesh, n_cond)
count_rt = np.zeros(out_shape)
median_rt = np.zeros(out_shape)
prop_accu = np.zeros(out_shape)
+
+
+

Single Subject example

+
#select single subject
+sub = subs[0]
+print('Subject - ' + str(sub))
+
+#just one session
+sesh = 1
+
+#load file
+#path =  './subject' + str(sub) + '/session' + str(sesh) + '/'
+path =  cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/'
+file =  [x for x in os.listdir(path) if x.endswith('.mat')][0]
+output_dict = sio.loadmat(path + file)
+print(path + file)
+
+#pull out important info
+output = output_dict['output']
+accuracy = output[:,6]
+rt = output[:,7]
+validity = output[:,3]
+print(accuracy,rt,validity)
+
+# median rt on each condition
+print('')
+print(rt)
+print(rt[validity == 0])
+print(rt[(validity == 0) & (rt <= rt_tooslow)])
+
+validRT     =  np.nanmedian(rt[ (validity == 1) &
+                              (rt >= rt_toofast) &
+                              (rt <= rt_tooslow)])
+
+print('Valid RT = ' + str(validRT) + ' ms')
+
+InvalidRT =  np.nanmedian(rt[ (validity == 0) &
+                              (rt >= rt_toofast) &
+                              (rt <= rt_tooslow)])
+
+print('Invalid RT = ' + str(InvalidRT) + ' ms')
+
+
+
Subject - 101
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject0101/session002/subject101_session2_behOutput_2018-11-20-16.58.50.mat
+[1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.
+ 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.
+ 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.
+ 1.] [435.031485 423.867152 335.010084 434.945121 297.102568 402.462939
+ 309.984897 352.377032 315.902263 346.695745 409.051914 337.994502
+ 412.490445 253.304399 325.551731 451.072688 362.531119 449.032591
+ 312.999804 293.670213 327.738388 526.629063 459.253055 355.719449
+ 352.932556 345.884407 454.775045 350.070761 313.989374 374.227764
+ 369.010847 345.467709 334.978303 334.730519 363.791318 451.067033
+ 301.155788 419.084514 271.027066 423.420538 342.584127 337.859108
+ 314.203547 339.341316 467.860489 327.351041 419.173015 340.925891
+ 318.901094 351.947652 437.583955 290.667171 319.596349 331.919014
+ 390.521833 412.642289 305.489389 348.551979 273.162904 347.040148
+ 339.326291 386.592227 317.4145   393.78332  426.145324 350.753921
+ 321.549098 313.940851 379.253401 338.095756 309.040606 403.22982
+ 368.109211] [1. 1. 0. 1. 1. 0. 0. 1. 1. 1. 1. 1. 1. 1. 1. 0. 1. 1. 1. 0. 1. 1. 1. 1.
+ 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 1. 1. 1. 1. 0.
+ 0. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 1. 0. 1. 1. 1. 1. 1. 1. 1. 1. 1. 1.
+ 1.]
+
+[435.031485 423.867152 335.010084 434.945121 297.102568 402.462939
+ 309.984897 352.377032 315.902263 346.695745 409.051914 337.994502
+ 412.490445 253.304399 325.551731 451.072688 362.531119 449.032591
+ 312.999804 293.670213 327.738388 526.629063 459.253055 355.719449
+ 352.932556 345.884407 454.775045 350.070761 313.989374 374.227764
+ 369.010847 345.467709 334.978303 334.730519 363.791318 451.067033
+ 301.155788 419.084514 271.027066 423.420538 342.584127 337.859108
+ 314.203547 339.341316 467.860489 327.351041 419.173015 340.925891
+ 318.901094 351.947652 437.583955 290.667171 319.596349 331.919014
+ 390.521833 412.642289 305.489389 348.551979 273.162904 347.040148
+ 339.326291 386.592227 317.4145   393.78332  426.145324 350.753921
+ 321.549098 313.940851 379.253401 338.095756 309.040606 403.22982
+ 368.109211]
+[335.010084 402.462939 309.984897 451.072688 293.670213 419.084514
+ 340.925891 318.901094 390.521833 386.592227]
+[335.010084 402.462939 309.984897 451.072688 293.670213 419.084514
+ 340.925891 318.901094 390.521833 386.592227]
+Valid RT = 347.04014799990546 ms
+Invalid RT = 363.75905900013095 ms
+
+
+

Loop through subjects

+
for isub, sub in enumerate(subs):
+    print('Subject - ' + str(sub))
+    for sesh in range(n_sesh):
+        # get the path and file name and load data
+        #path =  './subject' + str(sub) + '/session' + str(sesh+1) + '/'
+        path =  cueing_data_path + '/muse2016/subject' + str('%04.f' %sub) + '/session' + str('%03.f' %(sesh+1)) + '/'
+
+        file =  [x for x in os.listdir(path) if x.endswith('.mat')][0]
+        output_dict = sio.loadmat(path + file)
+
+        # pull out important stuff
+        output = output_dict['output']
+        accuracy = output[:,6]
+        rt = output[:,7]
+        validity = output[:,3]
+
+        # median rt on each condition
+        median_rt[isub,sesh,:]      = [  np.nanmedian(rt[ (validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]),
+                                     np.nanmedian(rt[ (validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) ]
+
+        # proportion accurate (number accurate / count)
+        prop_accu[isub,sesh,:]  = [ np.sum(accuracy[(validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) /
+                                   np.sum((validity == 1) & (rt >= rt_toofast) & (rt <= rt_tooslow)),
+                                   np.sum(accuracy[(validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)]) /
+                                   np.sum((validity == 0) & (rt >= rt_toofast) & (rt <= rt_tooslow)) ]
+
+
+
Subject - 101
+Subject - 102
+Subject - 103
+Subject - 104
+Subject - 106
+Subject - 108
+Subject - 109
+Subject - 110
+Subject - 111
+Subject - 112
+Subject - 202
+Subject - 203
+Subject - 204
+Subject - 205
+Subject - 207
+Subject - 208
+Subject - 209
+Subject - 210
+Subject - 211
+Subject - 301
+Subject - 302
+Subject - 303
+Subject - 304
+Subject - 305
+Subject - 306
+Subject - 307
+Subject - 308
+Subject - 309
+
+
+

Average over sessions and print data

+
# Summary stats and collapse sessions
+Out_median_RT = np.squeeze(np.nanmean(median_rt,axis=1))
+Out_prop_accu = np.squeeze(np.nanmean(prop_accu,axis=1))
+
+print('Median RT')
+print(Out_median_RT)
+print('Proportion Accurate')
+print(Out_prop_accu)
+
+
+
Median RT
+[[361.7079635  364.45366275]
+ [547.10713075 611.45526175]
+ [600.073256   587.019756  ]
+ [535.04144725 533.250092  ]
+ [419.24263125 439.1580675 ]
+ [456.3330375  631.1320265 ]
+ [446.95026625 556.038382  ]
+ [484.0763345  478.4887    ]
+ [443.5412585  472.331497  ]
+ [436.66653125 501.3115865 ]
+ [495.63977675 587.358365  ]
+ [520.97468575 973.144689  ]
+ [395.5098235  370.232933  ]
+ [489.59985525 560.08098575]
+ [455.6651645  500.22833625]
+ [662.9326765  643.0934405 ]
+ [488.56224425 474.7125945 ]
+ [482.1316255  543.0173155 ]
+ [713.4103845  894.0804795 ]
+ [485.48053725 533.6649235 ]
+ [464.47033775 517.368321  ]
+ [488.45884825 500.39610975]
+ [527.5814465  533.4898155 ]
+ [477.23867575 446.72875575]
+ [484.94210575 578.6717025 ]
+ [681.7458885  804.780758  ]
+ [419.736862   485.569911  ]
+ [435.42846425 437.12191   ]]
+Proportion Accurate
+[[0.98305085 0.96428571]
+ [1.         1.        ]
+ [0.91680961 0.88974359]
+ [0.97767857 0.96428571]
+ [0.9822995  1.        ]
+ [0.95403439 0.6       ]
+ [0.92346939 0.97727273]
+ [0.99019608 0.97058824]
+ [0.97395994 1.        ]
+ [0.68275862 0.83333333]
+ [0.89914021 0.89732143]
+ [0.92080745 0.73333333]
+ [0.94716042 0.91666667]
+ [0.95755518 1.        ]
+ [0.95959184 0.94949495]
+ [0.88296296 0.94117647]
+ [0.90046296 0.92857143]
+ [0.96551724 0.875     ]
+ [0.91171329 0.92857143]
+ [0.98333333 0.9       ]
+ [0.94733656 0.87820513]
+ [0.94187987 0.95833333]
+ [0.96479592 0.94736842]
+ [1.         0.90833333]
+ [0.94186047 0.8       ]
+ [1.         1.        ]
+ [0.97222222 0.97222222]
+ [0.95718391 1.        ]]
+
+
+

Plot barplot of results

+
# bar plot results
+plt.figure()
+# Accuracy
+ax = plt.subplot(211)
+plt.bar([0,1], np.nanmean(Out_prop_accu,axis=0), 0.6, yerr = np.nanstd(Out_prop_accu,axis=0)/np.sqrt(n_subs))
+plt.ylim(.9,.96)
+plt.title('Accuracy')
+plt.ylabel('Proportion Correct')
+ax.set_xticks([0,1])
+ax.set_xticklabels(conditions)
+# RT
+ax = plt.subplot(212)
+plt.bar([0,1], np.nanmean(Out_median_RT,axis=0), 0.6, yerr = np.nanstd(Out_median_RT,axis=0)/np.sqrt(n_subs))
+plt.ylim(450,600)
+plt.title('Reaction Time')
+plt.ylabel('RT (ms)')
+plt.xlabel('Condition')
+ax.set_xticks([0,1])
+ax.set_xticklabels(conditions)
+plt.show()
+
+
+Accuracy, Reaction Time

Output spreadsheet

+
## CSV output
+column_dict = {'Participant':subs,
+               'AccValid':Out_prop_accu[:,0],
+               'AccInvalid':Out_prop_accu[:,1],
+               'RTValid':Out_median_RT[:,0],
+               'RTInvalid':Out_median_RT[:,1] }
+df = pd.DataFrame(column_dict)
+print(df)
+df.to_csv('375CueingBehPy.csv',index=False)
+
+
+
    Participant  AccValid  AccInvalid     RTValid   RTInvalid
+0           101  0.983051    0.964286  361.707963  364.453663
+1           102  1.000000    1.000000  547.107131  611.455262
+2           103  0.916810    0.889744  600.073256  587.019756
+3           104  0.977679    0.964286  535.041447  533.250092
+4           106  0.982299    1.000000  419.242631  439.158067
+5           108  0.954034    0.600000  456.333038  631.132027
+6           109  0.923469    0.977273  446.950266  556.038382
+7           110  0.990196    0.970588  484.076334  478.488700
+8           111  0.973960    1.000000  443.541259  472.331497
+9           112  0.682759    0.833333  436.666531  501.311586
+10          202  0.899140    0.897321  495.639777  587.358365
+11          203  0.920807    0.733333  520.974686  973.144689
+12          204  0.947160    0.916667  395.509823  370.232933
+13          205  0.957555    1.000000  489.599855  560.080986
+14          207  0.959592    0.949495  455.665165  500.228336
+15          208  0.882963    0.941176  662.932677  643.093440
+16          209  0.900463    0.928571  488.562244  474.712594
+17          210  0.965517    0.875000  482.131626  543.017316
+18          211  0.911713    0.928571  713.410385  894.080479
+19          301  0.983333    0.900000  485.480537  533.664923
+20          302  0.947337    0.878205  464.470338  517.368321
+21          303  0.941880    0.958333  488.458848  500.396110
+22          304  0.964796    0.947368  527.581447  533.489816
+23          305  1.000000    0.908333  477.238676  446.728756
+24          306  0.941860    0.800000  484.942106  578.671703
+25          307  1.000000    1.000000  681.745889  804.780758
+26          308  0.972222    0.972222  419.736862  485.569911
+27          309  0.957184    1.000000  435.428464  437.121910
+
+
+

Total running time of the script: (0 minutes 0.155 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/04r__cueing_group_analysis_winter2019.html b/develop/auto_examples/visual_cueing/04r__cueing_group_analysis_winter2019.html new file mode 100644 index 00000000..16dc732f --- /dev/null +++ b/develop/auto_examples/visual_cueing/04r__cueing_group_analysis_winter2019.html @@ -0,0 +1,2391 @@ + + + + + + + Cueing Group Analysis Winter 2019 — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Cueing Group Analysis Winter 2019

+
+

Setup

+
# Standard Pythonic imports
+import os,sys,glob,numpy as np, pandas as pd
+import scipy
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+from matplotlib import pyplot as plt
+import matplotlib.patches as patches
+
+# MNE functions
+from mne import Epochs, find_events, concatenate_raws
+from mne.time_frequency import tfr_morlet
+
+# EEG-Noteooks functions
+from eegnb.analysis.analysis_utils import load_data
+from eegnb.datasets import fetch_dataset
+
+
+
+
+

Load the data

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+cueing_data_path = os.path.join(eegnb_data_path, 'visual-cueing', 'kylemathlab_dev')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(cueing_data_path):
+      fetch_dataset(data_dir=eegnb_data_path, experiment='visual-cueing', site='kylemathlab_dev')
+
+
+
+
+

Put the data into MNE Epochs

+

Fall 2018 +subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,

+
+

202, 203, 204, 205, 207, 208, 209, 210, 211, +301, 302, 303, 304, 305, 306, 307, 308, 309]

+
+

Winter 2019

+
subs = [1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,
+        1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,
+        1301, 1302, 1313,
+        1401, 1402, 1403, 1404, 1405,  1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]
+#
+# Both
+# subs = [101, 102, 103, 104, 105, 106, 108, 109, 110, 111, 112,
+#         202, 203, 204, 205, 207, 208, 209, 210, 211,
+#         301, 302, 303, 304, 305, 306, 307, 308, 309,
+#         1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110,
+#         1202, 1203, 1205, 1206, 1209, 1210, 1211, 1215,
+#         1301, 1302, 1313,
+#         1401, 1402, 1403, 1404, 1405,  1408, 1410, 1411, 1412, 1413, 1413, 1414, 1415, 1416]
+#
+#
+# placeholders to add to for each subject
+diff_out = []
+Ipsi_out = []
+Contra_out = []
+Ipsi_spectra_out = []
+Contra_spectra_out = []
+diff_spectra_out = []
+ERSP_diff_out = []
+ERSP_Ipsi_out = []
+ERSP_Contra_out = []
+
+frequencies =  np.linspace(6, 30, 100, endpoint=True)
+wave_cycles = 6
+
+# time frequency window for analysis
+f_low = 7 # Hz
+f_high = 10
+f_diff = f_high-f_low
+
+t_low = 0 # s
+t_high = 1
+t_diff = t_high-t_low
+
+bad_subs= [6, 7, 13, 26]
+really_bad_subs = [11, 12, 19]
+sub_count = 0
+
+
+
+for sub in subs:
+    print(sub)
+
+    sub_count += 1
+
+
+    if (sub_count in really_bad_subs):
+        rej_thresh_uV = 90
+    elif (sub_count in bad_subs):
+        rej_thresh_uV = 90
+    else:
+        rej_thresh_uV = 90
+
+    rej_thresh = rej_thresh_uV*1e-6
+
+
+    # Load both sessions
+    raw = load_data(sub,1, # subject, session
+                    experiment='visual-cueing',site='kylemathlab_dev',device_name='muse2016',
+                    data_dir = eegnb_data_path)
+
+    raw.append(
+          load_data(sub,2, # subject, session
+                    experiment='visual-cueing', site='kylemathlab_dev', device_name='muse2016',
+                    data_dir = eegnb_data_path))
+
+
+    # Filter Raw Data
+    raw.filter(1,30, method='iir')
+
+    #Select Events
+    events = find_events(raw)
+    event_id = {'LeftCue': 1, 'RightCue': 2}
+    epochs = Epochs(raw, events=events, event_id=event_id,
+                    tmin=-1, tmax=2, baseline=(-1, 0),
+                    reject={'eeg':rej_thresh}, preload=True,
+                    verbose=False, picks=[0, 3])
+    print('Trials Remaining: ' + str(len(epochs.events)) + '.')
+
+    # Compute morlet wavelet
+    # Left Cue
+    tfr, itc = tfr_morlet(epochs['LeftCue'], freqs=frequencies,
+                          n_cycles=wave_cycles, return_itc=True)
+    tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+    power_Ipsi_TP9 = tfr.data[0,:,:]
+    power_Contra_TP10 = tfr.data[1,:,:]
+
+    # Right Cue
+    tfr, itc = tfr_morlet(epochs['RightCue'], freqs=frequencies,
+                          n_cycles=wave_cycles, return_itc=True)
+    tfr = tfr.apply_baseline((-1,-.5),mode='mean')
+    power_Contra_TP9 = tfr.data[0,:,:]
+    power_Ipsi_TP10 = tfr.data[1,:,:]
+
+    # Compute averages Differences
+    power_Avg_Ipsi =   (power_Ipsi_TP9+power_Ipsi_TP10)/2;
+    power_Avg_Contra = (power_Contra_TP9+power_Contra_TP10)/2;
+    power_Avg_Diff = power_Avg_Ipsi-power_Avg_Contra;
+
+    #output data into array
+    times = epochs.times
+    Ipsi_out.append(np.mean(power_Avg_Ipsi[np.argmax(frequencies>f_low):
+                                           np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                           )
+                   )
+    Ipsi_spectra_out.append(np.mean(power_Avg_Ipsi[:,np.argmax(times>t_low):
+                                                   np.argmax(times>t_high)-1 ],1
+                                   )
+                           )
+
+    Contra_out.append(np.mean(power_Avg_Contra[np.argmax(frequencies>f_low):
+                                               np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                             )
+                     )
+
+    Contra_spectra_out.append(np.mean(power_Avg_Contra[:,np.argmax(times>t_low):
+                                                       np.argmax(times>t_high)-1 ],1))
+
+
+    diff_out.append(np.mean(power_Avg_Diff[np.argmax(frequencies>f_low):
+                                           np.argmax(frequencies>f_high)-1,
+                            np.argmax(times>t_low):np.argmax(times>t_high)-1 ]
+                           )
+                   )
+    diff_spectra_out.append(np.mean(power_Avg_Diff[:,np.argmax(times>t_low):
+                                                   np.argmax(times>t_high)-1 ],1
+                                   )
+                           )
+
+    #save the spectrograms to average over after
+    ERSP_diff_out.append(power_Avg_Diff)
+    ERSP_Ipsi_out.append(power_Avg_Ipsi)
+    ERSP_Contra_out.append(power_Avg_Contra)
+
+
+
1101
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1101/session001/subject1101_session1_recording_2019-03-26-20.08.20.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1101/session002/subject1101_session2_recording_2019-03-26-20.16.25.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+67 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 1.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1102
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1102/session001/subject1102_session1_recording_2019-03-26-20.27.26.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1102/session002/subject1102_session2_recording_2019-03-26-20.35.07.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+136 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 31.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1103
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1103/session001/subject1103_session1_recording_2019-03-26-20.45.55.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1103/session002/subject1103_session2_recording_2019-03-26-20.52.46.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+161 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 64.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1104
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1104/session001/subject1104_session1_recording_2019-03-26-21.02.02.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1104/session002/subject1104_session2_recording_2019-03-26-21.08.38.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+187 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 51.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1105
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1105/session001/subject1105_session1_recording_2019-03-26-21.18.06.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1105/session002/subject1105_session2_recording_2019-03-26-21.24.15.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+192 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 73.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1106
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1106/session001/subject1106_session1_recording_2019-03-26-21.37.04.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1106/session002/subject1106_session2_recording_2019-03-26-21.44.54.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+134 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 56.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1108
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1108/session001/subject1108_session1_recording_2019-03-28-20.37.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1108/session002/subject1108_session2_recording_2019-03-28-20.44.39.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+146 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 25.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1109
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1109/session001/subject1109_session1_recording_2019-03-28-20.53.09.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1109/session002/subject1109_session2_recording_2019-03-28-21.00.47.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+176 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 47.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1110
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1110/session001/subject1110_session1_recording_2019-03-28-21.09.52.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1110/session002/subject1110_session2_recording_2019-03-28-21.16.24.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+168 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 12.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1202
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1202/session001/subject1202_session1_recording_2019-03-26-21.00.19.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1202/session002/subject1202_session2_recording_2019-03-26-21.07.19.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+173 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 73.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1203
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1203/session001/subject1203_session1_recording_2019-03-26-21.18.01.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1203/session002/subject1203_session2_recording_2019-03-26-21.23.52.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+211 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 88.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1205
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1205/session001/subject1205_session1_recording_2019-03-26-20.27.40.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1205/session002/subject1205_session2_recording_2019-03-26-20.33.10.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+226 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 83.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1206
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1206/session001/subject1206_session1_recording_2019-03-26-20.43.14.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1206/session002/subject1206_session2_recording_2019-03-26-20.49.14.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+214 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 66.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1209
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1209/session001/subject1209_session1_recording_2019-03-28-20.35.06.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1209/session002/subject1209_session2_recording_2019-03-28-20.41.47.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+186 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 40.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1210
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1210/session001/subject1210_session1_recording_2019-03-28-20.51.18.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1210/session002/subject1210_session2_recording_2019-03-28-20.57.32.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+198 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 27.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1211
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1211/session001/subject1211_session1_recording_2019-03-28-20.13.27.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1211/session002/subject1211_session2_recording_2019-03-28-20.23.34.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+139 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 53.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1215
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1215/session001/subject1215_session1_recording_2019-03-28-21.07.04.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1215/session002/subject1215_session2_recording_2019-03-28-21.13.37.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+193 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 46.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1301
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1301/session001/subject1301_session1_recording_2019-03-26-20.10.43.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1301/session002/subject1301_session2_recording_2019-03-26-20.17.14.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+197 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 0.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1302
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1302/session001/subject1302_session1_recording_2019-03-26-20.30.19.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1302/session002/subject1302_session2_recording_2019-03-26-20.36.50.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+194 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 0.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1313
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1313/session001/subject1313_session1_recording_2019-03-28-21.32.56.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1313/session002/subject1313_session2_recording_2019-03-28-21.39.30.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+186 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 56.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1401
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1401/session001/subject1401_session1_recording_2019-03-26-20.03.18.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1401/session002/subject1401_session2_recording_2019-03-26-20.11.54.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+125 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 43.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1402
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1402/session001/subject1402_session1_recording_2019-03-26-20.23.18.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1402/session002/subject1402_session2_recording_2019-03-26-20.31.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+158 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 65.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1403
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1403/session001/subject1403_session1_recording_2019-03-26-20.40.55.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1403/session002/subject1403_session2_recording_2019-03-26-20.48.07.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+172 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 17.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1404
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1404/session001/subject1404_session1_recording_2019-03-26-20.58.09.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1404/session002/subject1404_session2_recording_2019-03-26-21.06.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+112 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 38.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1405
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1405/session001/subject1405_session1_recording_2019-03-26-21.17.49.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1405/session002/subject1405_session2_recording_2019-03-26-21.25.03.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+162 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 26.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1408
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1408/session001/subject1408_session1_recording_2019-03-26-21.36.18.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1408/session002/subject1408_session2_recording_2019-03-26-21.44.04.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61284
+    Range : 0 ... 61283 =      0.000 ...   239.387 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+171 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 16.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1410
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1410/session001/subject1410_session1_recording_2019-03-28-20.01.52.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1410/session002/subject1410_session2_recording_2019-03-28-20.09.14.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+172 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 53.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1411
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1411/session001/subject1411_session1_recording_2019-03-28-21.24.26.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1411/session002/subject1411_session2_recording_2019-03-28-21.31.09.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+160 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 53.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1412
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1412/session001/subject1412_session1_recording_2019-03-28-21.09.06.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1412/session002/subject1412_session2_recording_2019-03-28-21.15.30.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+168 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 46.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1413
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1413/session001/subject1413_session1_recording_2019-03-28-20.34.09.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1413/session002/subject1413_session2_recording_2019-03-28-20.40.57.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+154 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 41.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1413
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1413/session001/subject1413_session1_recording_2019-03-28-20.34.09.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1413/session002/subject1413_session2_recording_2019-03-28-20.40.57.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+154 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 41.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1414
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1414/session001/subject1414_session1_recording_2019-03-28-20.18.34.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61308
+    Range : 0 ... 61307 =      0.000 ...   239.480 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1414/session002/subject1414_session2_recording_2019-03-28-20.24.57.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+185 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 81.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1415
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1415/session001/subject1415_session1_recording_2019-03-28-21.39.58.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1415/session002/subject1415_session2_recording_2019-03-28-21.47.11.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+158 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 19.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+1416
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1416/session001/subject1416_session1_recording_2019-03-28-20.51.40.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-cueing/kylemathlab_dev/muse2016/subject1416/session002/subject1416_session2_recording_2019-03-28-20.58.50.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=61296
+    Range : 0 ... 61295 =      0.000 ...   239.434 secs
+Ready.
+Filtering raw data in 2 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+160 events found on stim channel stim
+Event IDs: [ 1  2 11 12 21 22]
+Trials Remaining: 40.
+Applying baseline correction (mode: mean)
+Applying baseline correction (mode: mean)
+
+
+
+
+

Combine subjects

+
#average spectrograms
+GrandAvg_diff = np.nanmean(ERSP_diff_out,0)
+GrandAvg_Ipsi = np.nanmean(ERSP_Ipsi_out,0)
+GrandAvg_Contra = np.nanmean(ERSP_Contra_out,0)
+
+#average spectra
+GrandAvg_spec_Ipsi = np.nanmean(Ipsi_spectra_out,0)
+GrandAvg_spec_Contra = np.nanmean(Contra_spectra_out,0)
+GrandAvg_spec_diff = np.nanmean(diff_spectra_out,0)
+
+#error bars for spectra (standard error)
+num_good = len(diff_out) - sum(np.isnan(diff_out))
+GrandAvg_spec_Ipsi_ste = np.nanstd(Ipsi_spectra_out,0)/np.sqrt(num_good)
+GrandAvg_spec_Contra_ste = np.nanstd(Contra_spectra_out,0)/np.sqrt(num_good)
+GrandAvg_spec_diff_ste = np.nanstd(diff_spectra_out,0)/np.sqrt(num_good)
+
+
+
#Plot Spectra error bars
+fig, ax = plt.subplots(1)
+plt.errorbar(frequencies,GrandAvg_spec_Ipsi,yerr=GrandAvg_spec_Ipsi_ste)
+plt.errorbar(frequencies,GrandAvg_spec_Contra,yerr=GrandAvg_spec_Contra_ste)
+plt.legend(('Ipsi','Contra'))
+plt.xlabel('Frequency (Hz)')
+plt.ylabel('Power (uV^2)')
+plt.hlines(0,3,33)
+
+
+04r  cueing group analysis winter2019
<matplotlib.collections.LineCollection object at 0x7fddc28c64f0>
+
+
+
#Plot Spectra Diff error bars
+fig, ax = plt.subplots(1)
+plt.errorbar(frequencies,GrandAvg_spec_diff,yerr=GrandAvg_spec_diff_ste)
+plt.legend('Ipsi-Contra')
+plt.xlabel('Frequency (Hz)')
+plt.ylabel('Power (uV^2)')
+plt.hlines(0,3,33)
+
+
+04r  cueing group analysis winter2019
<matplotlib.collections.LineCollection object at 0x7fddc2be6e80>
+
+
+

Grand Average Ipsi

+
plot_max = np.max([np.max(np.abs(GrandAvg_Ipsi)), np.max(np.abs(GrandAvg_Contra))])
+fig, ax = plt.subplots(1)
+im = plt.imshow(GrandAvg_Ipsi,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title('Power Ipsi')
+cb = fig.colorbar(im)
+cb.set_label('Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+#
+##e#################################################################################################
+#
+# Grand Average Contra
+#
+fig, ax = plt.subplots(1)
+im = plt.imshow(GrandAvg_Contra,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max, vmax=plot_max)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title('Power Contra')
+cb = fig.colorbar(im)
+cb.set_label('Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+#
+
+
+
    +
  • Power Ipsi
  • +
  • Power Contra
  • +
+
<matplotlib.patches.Rectangle object at 0x7fddc2d46910>
+
+
+

Grand Average Ipsi-Contra Difference

+
plot_max_diff = np.max(np.abs(GrandAvg_diff))
+fig, ax = plt.subplots(1)
+im = plt.imshow(GrandAvg_diff,
+           extent=[times[0], times[-1], frequencies[0], frequencies[-1]],
+           aspect='auto', origin='lower', cmap='coolwarm', vmin=-plot_max_diff, vmax=plot_max_diff)
+plt.xlabel('Time (sec)')
+plt.ylabel('Frequency (Hz)')
+plt.title('Power Difference Ipsi-Contra')
+cb = fig.colorbar(im)
+cb.set_label('Ipsi-Contra Power')
+# Create a Rectangle patch
+rect = patches.Rectangle((t_low,f_low),t_diff,f_diff,linewidth=1,edgecolor='k',facecolor='none')
+# Add the patch to the Axes
+ax.add_patch(rect)
+
+
+Power Difference Ipsi-Contra
<matplotlib.patches.Rectangle object at 0x7fddc2cf2e50>
+
+
+
+
+

Compute t test

+
num_good = len(diff_out) - sum(np.isnan(diff_out))
+
+[tstat, pval] = scipy.stats.ttest_ind(diff_out,np.zeros(len(diff_out)),nan_policy='omit')
+print('Ipsi Mean: '+  str(np.nanmean(Ipsi_out)))
+print('Contra Mean: '+  str(np.nanmean(Contra_out)))
+print('Mean Diff: '+  str(np.nanmean(diff_out)))
+print('t(' + str(num_good-1) + ') = ' + str(round(tstat,3)))
+print('p = ' + str(round(pval,3)))
+
+
+
Ipsi Mean: 3.5735930197157026e-11
+Contra Mean: 2.6421745679207323e-11
+Mean Diff: 9.314184517949713e-12
+t(30) = 0.456
+p = 0.65
+
+
+
+
+

Save average powers ipsi and contra

+
print(diff_out)
+raw_data = {'Ipsi Power': Ipsi_out,
+        'Contra Power': Contra_out}
+df = pd.DataFrame(raw_data, columns = ['Ipsi Power', 'Contra Power'])
+print(df)
+df.to_csv('375CueingEEG.csv')
+print('Saved subject averages for each condition to 375CueingEEG.csv file in present directory')
+
+
+
[nan, 1.789004769369718e-11, 6.402571924638044e-11, 1.201698855806472e-10, 1.6436811298970808e-10, 1.1465401250727375e-10, 8.107123680349165e-11, -3.1169419915324295e-11, 6.49372710835747e-11, 4.3529314385416065e-11, 2.8760356184521736e-11, 3.773350340044497e-11, 2.113134995947907e-11, 1.5571207459949488e-10, -4.6291150675500723e-10, -8.052232335321791e-11, -4.6644221372822607e-11, nan, nan, -3.438951097875039e-11, 7.091042185483073e-11, 1.298338355801429e-11, 1.3604198590381474e-10, -5.861633427481533e-11, -6.204912953285817e-11, 1.8344086913139518e-10, 2.380242500968444e-11, -8.805033297719282e-11, -7.508128947253835e-11, 2.907964859843707e-11, 2.907964859843707e-11, -4.146716305783299e-11, -1.5307351374032941e-10, 2.3393198398387354e-11]
+      Ipsi Power  Contra Power
+0            NaN           NaN
+1  -3.901325e-11 -5.690330e-11
+2   4.568905e-11 -1.833667e-11
+3   2.037251e-11 -9.979737e-11
+4   2.567893e-10  9.242117e-11
+5   1.128359e-10 -1.818109e-12
+6   3.090019e-10  2.279306e-10
+7  -1.325457e-10 -1.013762e-10
+8   4.086011e-11 -2.407716e-11
+9   8.168927e-11  3.815995e-11
+10  1.073335e-10  7.857313e-11
+11  5.134699e-10  4.757364e-10
+12 -1.088382e-11 -3.201517e-11
+13  1.006811e-10 -5.503096e-11
+14 -2.543183e-10  2.085932e-10
+15 -3.615777e-11  4.436455e-11
+16 -5.354581e-11 -6.901593e-12
+17           NaN           NaN
+18           NaN           NaN
+19  5.637907e-13  3.495330e-11
+20 -9.265134e-12 -8.017556e-11
+21 -2.448927e-10 -2.578761e-10
+22  2.682534e-10  1.322114e-10
+23  4.775386e-11  1.063702e-10
+24 -5.500639e-11  7.042736e-12
+25  1.907671e-11 -1.643642e-10
+26  2.746872e-11  3.666293e-12
+27 -9.699470e-11 -8.944371e-12
+28  1.345766e-10  2.096579e-10
+29  3.654300e-11  7.463347e-12
+30  3.654300e-11  7.463347e-12
+31 -3.302737e-11  8.439791e-12
+32 -4.426000e-11  1.088135e-10
+33 -4.177686e-11 -6.517005e-11
+Saved subject averages for each condition to 375CueingEEG.csv file in present directory
+
+
+
+
+

Save spectra

+
df = pd.DataFrame(Ipsi_spectra_out,columns=frequencies)
+print(df)
+df.to_csv('375CueingIpsiSpec.csv')
+
+df = pd.DataFrame(Contra_spectra_out,columns=frequencies)
+df.to_csv('375CueingContraSpec.csv')
+print('Saved Spectra to 375CueingContraSpec.csv file in present directory')
+
+
+
       6.000000      6.242424   ...     29.757576     30.000000
+0            NaN           NaN  ...           NaN           NaN
+1   7.991890e-11  8.036284e-11  ... -2.094144e-11 -2.057510e-11
+2   8.482874e-11  8.635330e-11  ... -3.867214e-12 -3.719235e-12
+3   1.394826e-10  1.324403e-10  ... -3.411441e-12 -4.411093e-12
+4   1.355015e-10  1.519385e-10  ... -5.270396e-12 -5.232011e-12
+5   4.146935e-11  4.747567e-11  ... -6.757861e-12 -6.344323e-12
+6   1.591767e-10  1.802644e-10  ... -3.011956e-12 -3.111111e-12
+7   2.051924e-10  1.359738e-10  ...  9.878931e-12  9.096915e-12
+8  -2.289620e-11 -1.966269e-11  ... -5.976043e-12 -5.762940e-12
+9   2.639212e-10  2.390503e-10  ...  1.273891e-12  1.085691e-12
+10  1.734091e-10  1.751927e-10  ...  1.127110e-12  1.042786e-12
+11 -4.155194e-11  5.554974e-11  ...  3.215457e-11  3.061286e-11
+12  1.132429e-10  9.957683e-11  ... -7.976622e-13 -6.777715e-13
+13  1.521996e-10  2.196098e-10  ... -1.447362e-11 -1.356378e-11
+14 -1.992109e-12  8.659712e-12  ...  1.917412e-12  1.278172e-12
+15 -1.632265e-10 -1.465498e-10  ... -4.835722e-12 -4.643791e-12
+16  1.051757e-10  9.010762e-11  ...  5.687247e-12  5.318740e-12
+17           NaN           NaN  ...           NaN           NaN
+18           NaN           NaN  ...           NaN           NaN
+19 -3.037587e-11 -3.195487e-11  ... -9.292653e-13 -1.176655e-12
+20 -4.576869e-11 -7.575594e-11  ...  5.139473e-12  4.728775e-12
+21 -3.017372e-10 -2.806035e-10  ... -1.168381e-13 -2.039849e-13
+22  1.892637e-10  1.928711e-10  ... -1.549501e-12 -1.594952e-12
+23 -1.863042e-10 -1.441354e-10  ...  9.789338e-12  9.213076e-12
+24 -1.099290e-10 -1.131996e-10  ...  6.197682e-12  5.769963e-12
+25 -2.486010e-10 -1.910190e-10  ...  9.456576e-12  9.193358e-12
+26 -2.444701e-10 -2.199233e-10  ...  1.492537e-11  1.519870e-11
+27 -1.015551e-10 -9.978091e-11  ... -1.160645e-11 -1.092205e-11
+28 -1.236814e-10 -1.335374e-10  ...  2.298211e-12  2.066240e-12
+29 -1.196307e-10 -1.035983e-10  ...  3.293275e-12  2.487449e-12
+30 -1.196307e-10 -1.035983e-10  ...  3.293275e-12  2.487449e-12
+31  4.570382e-11  2.986854e-11  ...  2.506011e-12  2.287051e-12
+32  6.194128e-10  6.055475e-10  ... -1.599190e-12 -1.352438e-12
+33 -1.126135e-10 -1.308744e-10  ... -3.366325e-11 -3.285204e-11
+
+[34 rows x 100 columns]
+Saved Spectra to 375CueingContraSpec.csv file in present directory
+
+
+

Total running time of the script: (0 minutes 24.491 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/index.html b/develop/auto_examples/visual_cueing/index.html new file mode 100644 index 00000000..bc11fc96 --- /dev/null +++ b/develop/auto_examples/visual_cueing/index.html @@ -0,0 +1,518 @@ + + + + + + + <no title> — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_cueing/sg_execution_times.html b/develop/auto_examples/visual_cueing/sg_execution_times.html new file mode 100644 index 00000000..37f38154 --- /dev/null +++ b/develop/auto_examples/visual_cueing/sg_execution_times.html @@ -0,0 +1,532 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

01:04.900 total execution time for 4 files from auto_examples/visual_cueing:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

Cueing Group Analysis (02r__cueing_group_analysis.py)

00:29.319

0.0

Cueing Group Analysis Winter 2019 (04r__cueing_group_analysis_winter2019.py)

00:24.491

0.0

Cueing Single Subject Analysis (01r__cueing_singlesub_analysis.py)

00:10.934

0.0

Cueing Behavioural Analysis Winter 2019 (03r__cueing_behaviour_analysis_winter2019.py)

00:00.155

0.0

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_gonogo/index.html b/develop/auto_examples/visual_gonogo/index.html new file mode 100644 index 00000000..e50e21fe --- /dev/null +++ b/develop/auto_examples/visual_gonogo/index.html @@ -0,0 +1,501 @@ + + + + + + + <no title> — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_gonogo/sg_execution_times.html b/develop/auto_examples/visual_gonogo/sg_execution_times.html new file mode 100644 index 00000000..0db5db32 --- /dev/null +++ b/develop/auto_examples/visual_gonogo/sg_execution_times.html @@ -0,0 +1,520 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

00:00.000 total execution time for 0 files from auto_examples/visual_gonogo:

+
+ + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

N/A

N/A

N/A

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_n170/00x__n170_run_experiment.html b/develop/auto_examples/visual_n170/00x__n170_run_experiment.html new file mode 100644 index 00000000..2e50a269 --- /dev/null +++ b/develop/auto_examples/visual_n170/00x__n170_run_experiment.html @@ -0,0 +1,548 @@ + + + + + + + N170 run experiment — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

N170 run experiment

+

This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment.

+
+

Setup

+

Imports

+
from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments import VisualN170
+
+# Define some variables
+board_name = "muse2" # board name
+experiment_name = "visual_n170" # experiment name
+subject_id = 0 # test subject id
+session_nb = 0 # session number
+record_duration = 120 # recording duration
+
+# generate save path
+save_fn = generate_save_fn(board_name, experiment_name, subject_id, session_nb)
+
+# create device object
+eeg_device = EEG(device=board_name)
+
+# Experiment type
+experiment = VisualN170(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=False)
+
+
+
+
+

Run experiment

+
experiment.run()
+
+# Saved csv location
+print("Recording saved in", experiment.save_fn)
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_n170/01r__n170_viz.html b/develop/auto_examples/visual_n170/01r__n170_viz.html new file mode 100644 index 00000000..ff12f1fb --- /dev/null +++ b/develop/auto_examples/visual_n170/01r__n170_viz.html @@ -0,0 +1,749 @@ + + + + + + + N170 Load and Visualize Data — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

N170 Load and Visualize Data

+

This example demonstrates loading, organizing, and visualizing ERP response data from the visual N170 experiment.

+

Images of faces and houses are shown in a rapid serial visual presentation (RSVP) stream.

+

The data used is the first subject and first session of the one of the eeg-expy N170 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). +This session consists of six two-minute blocks of continuous recording.

+

We first use the fetch_datasets to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud.

+

After loading the data, we place it in an MNE Epochs object, and obtain the trial-averaged response.

+

The final figure plotted at the end shows the N170 response ERP waveform.

+
+

Setup

+
# Some standard pythonic imports
+import os
+from matplotlib import pyplot as plt
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+
+# MNE functions
+from mne import Epochs,find_events
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data,plot_conditions
+from eegnb.datasets import fetch_dataset
+
+
+
+
+

Load Data

+

We will use the eeg-expy N170 example dataset

+

Note that if you are running this locally, the following cell will download +the example dataset, if you do not already have it.

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+n170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(n170_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples');
+
+subject = 1
+session = 1
+raw = load_data(subject,session,
+                experiment='visual-N170', site='eegnb_examples', device_name='muse2016_bfn',
+                data_dir = eegnb_data_path)
+
+
+
Downloading...
+From (original): https://drive.google.com/uc?id=1oStfxzEqf36R5d-2Auyw4DLnPj9E_FAH
+From (redirected): https://drive.google.com/uc?id=1oStfxzEqf36R5d-2Auyw4DLnPj9E_FAH&confirm=t&uuid=963ef67c-fe37-4f4e-a1c3-82d6d5ae3840
+To: /home/runner/.eegnb/data/downloaded_data.zip
+
+  0%|          | 0.00/33.1M [00:00<?, ?B/s]
+ 14%|█▍        | 4.72M/33.1M [00:00<00:00, 32.0MB/s]
+ 27%|██▋       | 8.91M/33.1M [00:00<00:01, 23.9MB/s]
+ 52%|█████▏    | 17.3M/33.1M [00:00<00:00, 34.2MB/s]
+ 78%|███████▊  | 25.7M/33.1M [00:00<00:00, 40.3MB/s]
+100%|██████████| 33.1M/33.1M [00:00<00:00, 45.0MB/s]
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-22.42.26.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.17.10.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.57.53.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-22.51.41.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.02.59.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.41.11.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.29.19.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-22.33.14.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.09.14.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016_bfn/subject0001/session001/recording_2022-03-20-23.51.54.csv
+
+
+
+
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76768
+    Range : 0 ... 76767 =      0.000 ...   299.871 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76936
+    Range : 0 ... 76935 =      0.000 ...   300.527 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76780
+    Range : 0 ... 76779 =      0.000 ...   299.918 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76744
+    Range : 0 ... 76743 =      0.000 ...   299.777 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76912
+    Range : 0 ... 76911 =      0.000 ...   300.434 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76816
+    Range : 0 ... 76815 =      0.000 ...   300.059 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76768
+    Range : 0 ... 76767 =      0.000 ...   299.871 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=93280
+    Range : 0 ... 93279 =      0.000 ...   364.371 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76816
+    Range : 0 ... 76815 =      0.000 ...   300.059 secs
+Ready.
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+['TP9', 'Fp1', 'Fp2', 'TP10', 'stim']
+Creating RawArray with float64 data, n_channels=5, n_times=76816
+    Range : 0 ... 76815 =      0.000 ...   300.059 secs
+Ready.
+
+
+
+
+

Visualize the power spectrum

+
raw.plot_psd()
+
+
+EEG
NOTE: plot_psd() is a legacy function. New code should use .compute_psd().plot().
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Filtering

+
raw.filter(1,30, method='iir')
+raw.plot_psd(fmin=1, fmax=30);
+
+
+EEG
Filtering raw data in 10 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+NOTE: plot_psd() is a legacy function. New code should use .compute_psd().plot().
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Epoching

+
# Create an array containing the timestamps and type of each stimulus (i.e. face or house)
+events = find_events(raw)
+event_id = {'House': 1, 'Face': 2}
+
+# Create an MNE Epochs object representing all the epochs around stimulus presentation
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.1, tmax=0.6, baseline=None,
+                reject={'eeg': 5e-5}, preload=True,
+                verbose=False, picks=[0,1,2,3])
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+epochs
+
+
+
3740 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  9.919786096256688
+
+
+
+ + + + + + + + + + + + + + + + + + + +
Number of events3369
EventsFace: 1702
House: 1667
Time range-0.102 – 0.602 s
Baselineoff
+
+
+
+
+

Epoch average

+
conditions = OrderedDict()
+#conditions['House'] = [1]
+#conditions['Face'] = [2]
+conditions['House'] = ['House']
+conditions['Face'] = ['Face']
+diffwav = ('Face', 'House')
+
+fig, ax = plot_conditions(epochs, conditions=conditions,
+                          ci=97.5, n_boot=1000, title='',
+                          diff_waveform=diffwav,
+                          channel_order=[1,0,2,3])
+# reordering of epochs.ch_names according to [[0,2],[1,3]] of subplot axes
+
+# Manually adjust the ylims
+for i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6])
+for i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6])
+plt.tight_layout()
+
+
+Fp1, Fp2, TP9, TP10

Total running time of the script: (0 minutes 4.369 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_n170/02r__n170_decoding.html b/develop/auto_examples/visual_n170/02r__n170_decoding.html new file mode 100644 index 00000000..342ad099 --- /dev/null +++ b/develop/auto_examples/visual_n170/02r__n170_decoding.html @@ -0,0 +1,814 @@ + + + + + + + N170 Decoding — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

N170 Decoding

+

This example runs a set of machine learning algorithms on the N170 faces/houses +dataset, and compares them in terms of classification performance.

+

The data used is exactly the same as in the N170 load_and_visualize example.

+
+

Setup

+
# Some standard pythonic imports
+import warnings
+warnings.filterwarnings('ignore')
+import os,numpy as np,pandas as pd
+from collections import OrderedDict
+import seaborn as sns
+from matplotlib import pyplot as plt
+
+# MNE functions
+from mne import Epochs,find_events
+from mne.decoding import Vectorizer
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data
+from eegnb.datasets import fetch_dataset
+
+# Scikit-learn and Pyriemann ML functionalities
+from sklearn.pipeline import make_pipeline
+from sklearn.linear_model import LogisticRegression
+from sklearn.preprocessing import StandardScaler
+from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
+from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit
+from pyriemann.estimation import ERPCovariances, XdawnCovariances
+from pyriemann.tangentspace import TangentSpace
+from pyriemann.classification import MDM
+
+
+
+
+

Load Data

+

( See the n170 load_and_visualize example for further description of this)

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+n170_data_path = os.path.join(eegnb_data_path, 'visual-N170', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(n170_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-N170', site='eegnb_examples')
+
+subject = 1
+session = 1
+raw = load_data(subject,session,
+                experiment='visual-N170', site='eegnb_examples', device_name='muse2016',
+                data_dir = eegnb_data_path)
+
+
+
Loading these files:
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.35.26.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.42.33.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.30.01.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.40.17.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.45.08.csv
+
+/home/runner/.eegnb/data/visual-N170/eegnb_examples/muse2016/subject0001/session001/data_2017-09-13-15.32.50.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30720
+    Range : 0 ... 30719 =      0.000 ...   119.996 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+
+
+
+
+

Filteriing

+
raw.filter(1,30, method='iir')
+
+
+
Filtering raw data in 6 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+
+
+
+ General + + + + + + + + + + + + + + + + + + + +
Measurement dateUnknown
ExperimenterUnknown
ParticipantUnknown
+
+
+ Channels + + + + + + + + + + + + + + + + + + + + + + + +
Digitized points7 points
Good channels4 EEG, 1 misc, 1 Stimulus
Bad channelsNone
EOG channelsNot available
ECG channelsNot available
+
+
+ Data + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Sampling frequency256.00 Hz
Highpass1.00 Hz
Lowpass30.00 Hz
Duration00:12:01 (HH:MM:SS)
+
+
+
+
+
+

Epoching

+
# Create an array containing the timestamps and type of each stimulus (i.e. face or house)
+events = find_events(raw)
+event_id = {'House': 1, 'Face': 2}
+
+# Create an MNE Epochs object representing all the epochs around stimulus presentation
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.1, tmax=0.8, baseline=None,
+                reject={'eeg': 75e-6}, preload=True,
+                verbose=False, picks=[0,1,2,3])
+
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+epochs
+
+
+
1174 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  4.003407155025551
+
+
+
+ + + + + + + + + + + + + + + + + + + +
Number of events1127
EventsFace: 562
House: 565
Time range-0.102 – 0.801 s
Baselineoff
+
+
+
+
+

Run classification

+
clfs = OrderedDict()
+clfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression())
+clfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen'))
+clfs['ERPCov + TS'] = make_pipeline(ERPCovariances(estimator='oas'), TangentSpace(), LogisticRegression())
+clfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(estimator='oas'), MDM())
+clfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression())
+clfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM())
+
+# format data
+epochs.pick_types(eeg=True)
+X = epochs.get_data() * 1e6
+times = epochs.times
+y = epochs.events[:, -1]
+
+# define cross validation
+cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25,
+                                    random_state=42)
+
+# run cross validation for each pipeline
+auc = []
+methods = []
+for m in clfs:
+    print(m)
+    try:
+        res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc',
+                              cv=cv, n_jobs=-1)
+        auc.extend(res)
+        methods.extend([m]*len(res))
+    except:
+        pass
+
+
+
NOTE: pick_types() is a legacy function. New code should use inst.pick(...).
+Vect + LR
+Vect + RegLDA
+ERPCov + TS
+ERPCov + MDM
+XdawnCov + TS
+XdawnCov + MDM
+
+
+
+
+

Plot Decoding Results

+
results = pd.DataFrame(data=auc, columns=['AUC'])
+results['Method'] = methods
+
+fig = plt.figure(figsize=[8,4])
+sns.barplot(data=results, x='AUC', y='Method')
+plt.xlim(0.4, 0.9)
+sns.despine()
+
+
+02r  n170 decoding

Total running time of the script: (0 minutes 30.511 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_n170/index.html b/develop/auto_examples/visual_n170/index.html new file mode 100644 index 00000000..777b7a79 --- /dev/null +++ b/develop/auto_examples/visual_n170/index.html @@ -0,0 +1,515 @@ + + + + + + + <no title> — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_n170/sg_execution_times.html b/develop/auto_examples/visual_n170/sg_execution_times.html new file mode 100644 index 00000000..5bf900d7 --- /dev/null +++ b/develop/auto_examples/visual_n170/sg_execution_times.html @@ -0,0 +1,528 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

00:34.880 total execution time for 3 files from auto_examples/visual_n170:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

N170 Decoding (02r__n170_decoding.py)

00:30.511

0.0

N170 Load and Visualize Data (01r__n170_viz.py)

00:04.369

0.0

N170 run experiment (00x__n170_run_experiment.py)

00:00.000

0.0

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_p300/00x__p300_run_experiment.html b/develop/auto_examples/visual_p300/00x__p300_run_experiment.html new file mode 100644 index 00000000..6b6655ca --- /dev/null +++ b/develop/auto_examples/visual_p300/00x__p300_run_experiment.html @@ -0,0 +1,550 @@ + + + + + + + P300 run experiment — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

P300 run experiment

+

This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment.

+
+

Setup

+

Imports

+
import os
+from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments import VisualP300
+
+# Define some variables
+board_name = "muse2"
+experiment = "visual_p300"
+subject_id = 0
+session_nb = 0
+record_duration = 120
+
+
+
+
+

Initiate EEG device

+

Start EEG device

+
eeg_device = EEG(device=board_name)
+
+# Create save file name
+save_fn = generate_save_fn(board_name, experiment, subject_id, session_nb)
+print(save_fn)
+
+
+
+
+

Run experiment

+

Create Experiment Object

+
p300 = VisualP300(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+p300.run()
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_p300/01r__p300_viz.html b/develop/auto_examples/visual_p300/01r__p300_viz.html new file mode 100644 index 00000000..0a5572e7 --- /dev/null +++ b/develop/auto_examples/visual_p300/01r__p300_viz.html @@ -0,0 +1,689 @@ + + + + + + + P300 Load and Visualize Data — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

P300 Load and Visualize Data

+

This example demonstrates loading, organizing, and visualizing ERP response data from the visual P300 experiment. The experiment uses a visual oddball paradigm. Images of cats and dogs are shwn in a rapid serial visual presentation (RSVP) stream, with cats and dogs categorized respectively as ‘targets’ or ‘non-targets’, according to which has high or low probability of occurring, respectively.

+

The data used is the first subject and first session of the one of the eeg-expy P300 example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording.

+

We first use the fetch_datasets to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud.

+

After loading the data, we place it in an MNE Epochs object, and obtain the trial-averaged response.

+

The final figure plotted at the end shows the P300 response ERP waveform.

+
+

Setup

+
# Some standard pythonic imports
+import os
+from matplotlib import pyplot as plt
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+
+# MNE functions
+from mne import Epochs,find_events
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data,plot_conditions
+from eegnb.datasets import fetch_dataset
+
+
+
+
Load Data

We will use the eeg-expy N170 example dataset

+

Note that if you are running this locally, the following cell will download +the example dataset, if you do not already have it.

+
+
+
+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+p300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(p300_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples');
+
+
+subject = 1
+session = 1
+raw = load_data(subject,session,
+                experiment='visual-P300', site='eegnb_examples', device_name='muse2016',
+                data_dir = eegnb_data_path)
+
+
+
Downloading...
+From: https://drive.google.com/uc?id=1OLcj-zSjqdNrsBSUAsGBXOwWDnGWTVFC
+To: /home/runner/.eegnb/data/downloaded_data.zip
+
+  0%|          | 0.00/18.4M [00:00<?, ?B/s]
+ 34%|███▍      | 6.29M/18.4M [00:00<00:00, 61.4MB/s]
+100%|██████████| 18.4M/18.4M [00:00<00:00, 93.1MB/s]
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_45_13.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-16_03_08.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_47_49.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_58_30.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_55_07.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_51_07.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+
+
+
+
+

Visualize the power spectrum

+
raw.plot_psd()
+
+
+EEG
NOTE: plot_psd() is a legacy function. New code should use .compute_psd().plot().
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Filteriing

+
raw.filter(1,30, method='iir')
+raw.plot_psd(fmin=1, fmax=30);
+
+
+EEG
Filtering raw data in 6 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+NOTE: plot_psd() is a legacy function. New code should use .compute_psd().plot().
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Epoching

+
# Create an array containing the timestamps and type of each stimulus (i.e. face or house)
+events = find_events(raw)
+event_id = {'non-target': 1, 'target': 2}
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.1, tmax=0.8, baseline=None,                                                                           reject={'eeg': 100e-6}, preload=True,
+                verbose=False, picks=[0,1,2,3])
+
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+
+
1161 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  1.5503875968992276
+
+
+
+
+

Epoch average

+
conditions = OrderedDict()
+conditions['non-target'] = ['non-target']
+conditions['target'] = ['target']
+diffwav = ["non-target", "target"]
+
+fig, ax = plot_conditions(epochs, conditions=conditions,
+                          ci=97.5, n_boot=1000, title='',
+                          channel_order=[1,0,2,3],ylim=[-2E6,2.5E6],
+                          diff_waveform = diffwav)
+
+# Manually adjust the ylims
+for i in [0,2]: ax[i].set_ylim([-0.5e6,0.5e6])
+for i in [1,3]: ax[i].set_ylim([-1.5e6,2.5e6])
+
+plt.tight_layout()
+
+
+AF7, AF8, TP9, TP10

Total running time of the script: (0 minutes 11.982 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_p300/02r__p300_decoding.html b/develop/auto_examples/visual_p300/02r__p300_decoding.html new file mode 100644 index 00000000..5ac0f0f6 --- /dev/null +++ b/develop/auto_examples/visual_p300/02r__p300_decoding.html @@ -0,0 +1,800 @@ + + + + + + + P300 Decoding — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

P300 Decoding

+

This example runs a set of machine learning algorithms on the P300 cats/dogs +dataset, and compares them in terms of classification performance.

+

The data used is exactly the same as in the P300 load_and_visualize example.

+
+

Setup

+
# Some standard pythonic imports
+import warnings
+warnings.filterwarnings('ignore')
+import os,numpy as np,pandas as pd
+from collections import OrderedDict
+import seaborn as sns
+from matplotlib import pyplot as plt
+
+# MNE functions
+from mne import Epochs,find_events
+from mne.decoding import Vectorizer
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data
+from eegnb.datasets import fetch_dataset
+
+# Scikit-learn and Pyriemann ML functionalities
+from sklearn.pipeline import make_pipeline
+from sklearn.linear_model import LogisticRegression
+from sklearn.preprocessing import StandardScaler
+from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
+from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit
+from pyriemann.estimation import ERPCovariances, XdawnCovariances, Xdawn
+from pyriemann.tangentspace import TangentSpace
+from pyriemann.classification import MDM
+
+
+
+
+

Load Data

+

( See the P300 load_and_visualize example for further description of this)

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+p300_data_path = os.path.join(eegnb_data_path, 'visual-P300', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(p300_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-P300', site='eegnb_examples')
+
+
+subject = 1
+session = 1
+raw = load_data(subject,session,
+                experiment='visual-P300', site='eegnb_examples', device_name='muse2016',
+                data_dir = eegnb_data_path)
+
+
+
Loading these files:
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_45_13.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-16_03_08.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_47_49.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_58_30.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_55_07.csv
+
+/home/runner/.eegnb/data/visual-P300/eegnb_examples/muse2016/subject0001/session001/data_2017-02-04-15_51_07.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+
+
+
+
+

Filteriing

+
raw.filter(1,30, method='iir')
+
+
+
Filtering raw data in 6 contiguous segments
+Setting up band-pass filter from 1 - 30 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 1.00, 30.00 Hz: -6.02, -6.02 dB
+
+
+
+
+ General + + + + + + + + + + + + + + + + + + + +
Measurement dateUnknown
ExperimenterUnknown
ParticipantUnknown
+
+
+ Channels + + + + + + + + + + + + + + + + + + + + + + + +
Digitized points7 points
Good channels4 EEG, 1 misc, 1 Stimulus
Bad channelsNone
EOG channelsNot available
ECG channelsNot available
+
+
+ Data + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Sampling frequency256.00 Hz
Highpass1.00 Hz
Lowpass30.00 Hz
Duration00:12:01 (HH:MM:SS)
+
+
+
+
+
+

Epoching

+
# Create an array containing the timestamps and type of each stimulus (i.e. face or house)
+events = find_events(raw)
+event_id = {'Non-Target': 1, 'Target': 2}
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.1, tmax=0.8, baseline=None,                                                                                     reject={'eeg': 100e-6}, preload=True,                                                                                   verbose=False, picks=[0,1,2,3])
+
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+epochs
+
+
+
1161 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  1.5503875968992276
+
+
+
+ + + + + + + + + + + + + + + + + + + +
Number of events1143
EventsNon-Target: 959
Target: 184
Time range-0.102 – 0.801 s
Baselineoff
+
+
+
+
+

Classfication

+
clfs = OrderedDict()
+clfs['Vect + LR'] = make_pipeline(Vectorizer(), StandardScaler(), LogisticRegression())
+clfs['Vect + RegLDA'] = make_pipeline(Vectorizer(), LDA(shrinkage='auto', solver='eigen'))
+clfs['Xdawn + RegLDA'] = make_pipeline(Xdawn(2, classes=[1]), Vectorizer(), LDA(shrinkage='auto', solver='eigen'))
+
+clfs['XdawnCov + TS'] = make_pipeline(XdawnCovariances(estimator='oas'), TangentSpace(), LogisticRegression())
+clfs['XdawnCov + MDM'] = make_pipeline(XdawnCovariances(estimator='oas'), MDM())
+
+
+clfs['ERPCov + TS'] = make_pipeline(ERPCovariances(), TangentSpace(), LogisticRegression())
+clfs['ERPCov + MDM'] = make_pipeline(ERPCovariances(), MDM())
+
+# format data
+epochs.pick_types(eeg=True)
+X = epochs.get_data() * 1e6
+times = epochs.times
+y = epochs.events[:, -1]
+
+# define cross validation
+cv = StratifiedShuffleSplit(n_splits=10, test_size=0.25, random_state=42)
+
+# run cross validation for each pipeline
+auc = []
+methods = []
+for m in clfs:
+    res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc', cv=cv, n_jobs=-1)
+    auc.extend(res)
+    methods.extend([m]*len(res))
+
+results = pd.DataFrame(data=auc, columns=['AUC'])
+results['Method'] = methods
+
+plt.figure(figsize=[8,4])
+sns.barplot(data=results, x='AUC', y='Method')
+plt.xlim(0.2, 0.85)
+sns.despine()
+
+
+02r  p300 decoding
NOTE: pick_types() is a legacy function. New code should use inst.pick(...).
+
+
+

Total running time of the script: (0 minutes 12.321 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_p300/index.html b/develop/auto_examples/visual_p300/index.html new file mode 100644 index 00000000..13941795 --- /dev/null +++ b/develop/auto_examples/visual_p300/index.html @@ -0,0 +1,515 @@ + + + + + + + <no title> — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_p300/sg_execution_times.html b/develop/auto_examples/visual_p300/sg_execution_times.html new file mode 100644 index 00000000..4343e21e --- /dev/null +++ b/develop/auto_examples/visual_p300/sg_execution_times.html @@ -0,0 +1,528 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

00:24.304 total execution time for 3 files from auto_examples/visual_p300:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

P300 Decoding (02r__p300_decoding.py)

00:12.321

0.0

P300 Load and Visualize Data (01r__p300_viz.py)

00:11.982

0.0

P300 run experiment (00x__p300_run_experiment.py)

00:00.000

0.0

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_ssvep/00x__ssvep_run_experiment.html b/develop/auto_examples/visual_ssvep/00x__ssvep_run_experiment.html new file mode 100644 index 00000000..d4538dd0 --- /dev/null +++ b/develop/auto_examples/visual_ssvep/00x__ssvep_run_experiment.html @@ -0,0 +1,549 @@ + + + + + + + SSVEP run experiment — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

SSVEP run experiment

+

This example demonstrates the initiation of an EEG stream with eeg-expy, and how to run +an experiment.

+
+

Setup

+

Imports

+
import os
+from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments import VisualSSVEP
+
+# Define some variables
+board_name = "muse2"
+experiment = "visual_ssvep"
+subject_id = 0
+session_nb = 0
+record_duration = 120
+
+
+
+
+

Initiate EEG device

+

Start EEG device

+
eeg_device = EEG(device=board_name)
+
+# Create save file name
+save_fn = generate_save_fn(board_name, experiment, subject_id, session_nb)
+print(save_fn)
+
+
+
+
+

Run experiment

+
ssvep = VisualSSVEP(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+ssvep.run()
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_ssvep/01r__ssvep_viz.html b/develop/auto_examples/visual_ssvep/01r__ssvep_viz.html new file mode 100644 index 00000000..082fbdc4 --- /dev/null +++ b/develop/auto_examples/visual_ssvep/01r__ssvep_viz.html @@ -0,0 +1,816 @@ + + + + + + + SSVEP Visualization — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

SSVEP Visualization

+

This example demonstrates loading, organizing, and visualizing data from the steady-state visual evoked potentials (SSVEP) experiment.

+

The data used is the first subject and first session of the one of the eeg-expy ssvep example datasets, recorded using the InteraXon MUSE EEG headset (2016 model). This session consists of six two-minute blocks of continuous recording.

+

We first use the fetch_datasets to obtain a list of filenames. If these files are not already present +in the specified data directory, they will be quickly downloaded from the cloud.

+

After loading the data, we place it in an MNE Epochs object, and obtain the trial-averaged response.

+

The final figures show the visual frequencies appearing in the measured power spectrum.

+
# Some standard pythonic imports
+import os, numpy as np, pandas as pd
+from collections import OrderedDict
+import warnings
+warnings.filterwarnings('ignore')
+from matplotlib import pyplot as plt
+
+# MNE functions
+from mne import Epochs,find_events
+from mne.time_frequency import tfr_morlet
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data,plot_conditions
+from eegnb.datasets import fetch_dataset
+
+
+
+
Load Data

We will use the eeg-expy SSVEP example dataset

+

Note that if you are running this locally, the following cell will download +the example dataset, if you do not already have it.

+
+
+
+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+ssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(ssvep_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples');
+
+
+subject = 1
+session = 1
+raw = load_data(subject, session,
+                experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016',
+                data_dir = eegnb_data_path,
+                replace_ch_names={'Right AUX': 'POz'})
+raw.set_channel_types({'POz': 'eeg'})
+
+
+
Downloading...
+From: https://drive.google.com/uc?id=1zj9Wx-YEMJo7GugUUu7Sshcybfsr-Fze
+To: /home/runner/.eegnb/data/downloaded_data.zip
+
+  0%|          | 0.00/5.14M [00:00<?, ?B/s]
+ 92%|█████████▏| 4.72M/5.14M [00:00<00:00, 23.1MB/s]
+100%|██████████| 5.14M/5.14M [00:00<00:00, 25.0MB/s]
+
+
+Loading these files:
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.27.36.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.29.57.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.20.04.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.32.15.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.22.51.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.25.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30720
+    Range : 0 ... 30719 =      0.000 ...   119.996 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30720
+    Range : 0 ... 30719 =      0.000 ...   119.996 secs
+Ready.
+
+
+
+
+ General + + + + + + + + + + + + + + + + + + + +
Measurement dateUnknown
ExperimenterUnknown
ParticipantUnknown
+
+
+ Channels + + + + + + + + + + + + + + + + + + + + + + + +
Digitized points7 points
Good channels5 EEG, 1 Stimulus
Bad channelsNone
EOG channelsNot available
ECG channelsNot available
+
+
+ Data + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Sampling frequency256.00 Hz
Highpass0.00 Hz
Lowpass128.00 Hz
Duration00:12:01 (HH:MM:SS)
+
+
+
+
+

Visualize the power spectrum

+
raw.plot_psd()
+
+
+EEG
NOTE: plot_psd() is a legacy function. New code should use .compute_psd().plot().
+Effective window size : 8.000 (s)
+
+<MNELineFigure size 1000x350 with 2 Axes>
+
+
+
+
+

Epoching

+
# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus.
+# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks
+
+events = find_events(raw)
+event_id = {'30 Hz': 1, '20 Hz': 2}
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.5, tmax=4, baseline=None, preload=True,
+                verbose=False, picks=[0, 1, 2, 3, 4])
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+
+
197 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  2.538071065989844
+
+
+
+
+

Stimuli-Specific PSD

+
# Next, we can compare the PSD of epochs specifically during 20hz and 30hz stimulus presentation
+
+f, axs = plt.subplots(2, 1, figsize=(10, 10))
+
+welch_params=dict(method='welch',
+                  n_fft=1028,
+                  n_per_seg=256 * 3,
+                  picks='all')
+
+psd1, freq1 = epochs['30 Hz'].compute_psd(**welch_params).get_data(return_freqs=True)
+psd2, freq2 = epochs['20 Hz'].compute_psd(**welch_params).get_data(return_freqs=True)
+psd1 = 10 * np.log10(psd1)
+psd2 = 10 * np.log10(psd2)
+
+psd1_mean = psd1.mean(0)
+psd1_std = psd1.mean(0)
+
+psd2_mean = psd2.mean(0)
+psd2_std = psd2.mean(0)
+
+axs[0].plot(freq1, psd1_mean[[0, 3], :].mean(0), color='b', label='30 Hz')
+axs[0].plot(freq2, psd2_mean[[0, 3], :].mean(0), color='r', label='20 Hz')
+
+axs[1].plot(freq1, psd1_mean[4, :], color='b', label='30 Hz')
+axs[1].plot(freq2, psd2_mean[4, :], color='r', label='20 Hz')
+
+axs[0].set_title('TP9 and TP10')
+axs[1].set_title('POz')
+
+axs[0].set_ylabel('Power Spectral Density (dB)')
+axs[1].set_ylabel('Power Spectral Density (dB)')
+
+axs[0].set_xlim((2, 50))
+axs[1].set_xlim((2, 50))
+
+axs[1].set_xlabel('Frequency (Hz)')
+
+axs[0].legend()
+axs[1].legend()
+
+plt.show();
+
+# With this visualization we can clearly see distinct peaks at 30hz and 20hz in the PSD, corresponding to the frequency of the visual stimulation. The peaks are much larger at the POz electrode, but still visible at TP9 and TP10
+
+
+TP9 and TP10, POz
Effective window size : 4.016 (s)
+Effective window size : 4.016 (s)
+
+
+
+
+

Spectrogram

+
# We can also look for SSVEPs in the spectrogram, which uses color to represent the power of frequencies in the EEG signal over time
+
+frequencies = np.logspace(1, 1.75, 60)
+tfr, itc = tfr_morlet(epochs['30 Hz'], freqs=frequencies,picks='all',
+                              n_cycles=15, return_itc=True)
+tfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio',
+                 title='POz - 30 Hz stim');
+
+tfr, itc = tfr_morlet(epochs['20 Hz'], freqs=frequencies,picks='all',
+                              n_cycles=15, return_itc=True)
+tfr.plot(picks=[4], baseline=(-0.5, -0.1), mode='logratio',
+                 title='POz - 20 Hz stim');
+
+# Set Layout engine to tight to fix error with using colorbar layout error
+plt.figure().set_layout_engine('tight');
+plt.tight_layout()
+
+# Once again we can see clear SSVEPs at 30hz and 20hz
+
+
+
    +
  • POz - 30 Hz stim
  • +
  • POz - 20 Hz stim
  • +
  • 01r  ssvep viz
  • +
+
Applying baseline correction (mode: logratio)
+Applying baseline correction (mode: logratio)
+
+
+

Total running time of the script: (0 minutes 12.710 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_ssvep/02r__ssvep_decoding.html b/develop/auto_examples/visual_ssvep/02r__ssvep_decoding.html new file mode 100644 index 00000000..8c4b7e54 --- /dev/null +++ b/develop/auto_examples/visual_ssvep/02r__ssvep_decoding.html @@ -0,0 +1,745 @@ + + + + + + + SSVEP Decoding — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

SSVEP Decoding

+

This notebook runs only the data analysis part of experiment.

+

Look at the notes to see how this can be run on the web with binder or google collab.

+

All of the additional notes are removed; only the code cells are kept.

+
+

Setup

+
# Some standard pythonic imports
+import warnings
+warnings.filterwarnings('ignore')
+import os,numpy as np,pandas as pd
+from collections import OrderedDict
+import seaborn as sns
+from matplotlib import pyplot as plt
+
+# MNE functions
+from mne import Epochs,find_events
+from mne.decoding import Vectorizer
+
+# EEG-Notebooks functions
+from eegnb.analysis.analysis_utils import load_data
+from eegnb.datasets import fetch_dataset
+
+# Scikit-learn and Pyriemann ML functionalities
+from sklearn.pipeline import make_pipeline
+from sklearn.linear_model import LogisticRegression
+from sklearn.preprocessing import StandardScaler
+from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
+from sklearn.model_selection import cross_val_score, StratifiedShuffleSplit
+from pyriemann.estimation import Covariances, ERPCovariances, XdawnCovariances
+from pyriemann.spatialfilters import CSP
+from pyriemann.tangentspace import TangentSpace
+from pyriemann.classification import MDM
+
+
+
+
+

Load Data

+

( See the ssvep load_and_visualize example for further description of this)

+
eegnb_data_path = os.path.join(os.path.expanduser('~/'),'.eegnb', 'data')
+ssvep_data_path = os.path.join(eegnb_data_path, 'visual-SSVEP', 'eegnb_examples')
+
+# If dataset hasn't been downloaded yet, download it
+if not os.path.isdir(ssvep_data_path):
+    fetch_dataset(data_dir=eegnb_data_path, experiment='visual-SSVEP', site='eegnb_examples')
+
+subject = 1
+session = 1
+raw = load_data(subject, session,
+                experiment='visual-SSVEP', site='eegnb_examples', device_name='muse2016',
+                data_dir = eegnb_data_path,
+                replace_ch_names={'Right AUX': 'POz'})
+
+
+
Loading these files:
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.27.36.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.29.57.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.20.04.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.32.15.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.22.51.csv
+
+/home/runner/.eegnb/data/visual-SSVEP/eegnb_examples/muse2016/subject0001/session001/data_2017-09-14-21.25.17.csv
+
+
+
+
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30720
+    Range : 0 ... 30719 =      0.000 ...   119.996 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30732
+    Range : 0 ... 30731 =      0.000 ...   120.043 secs
+Ready.
+['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX', 'stim']
+['TP9', 'AF7', 'AF8', 'TP10', 'POz', 'stim']
+Creating RawArray with float64 data, n_channels=6, n_times=30720
+    Range : 0 ... 30719 =      0.000 ...   119.996 secs
+Ready.
+
+
+
+
+

Epoching

+
# Next, we will chunk (epoch) the data into segments representing the data 100ms before to 800ms after each stimulus.
+# Note: we will not reject epochs here because the amplitude of the SSVEP at POz is so large it is difficult to separate from eye blinks
+
+events = find_events(raw)
+event_id = {'30 Hz': 1, '20 Hz': 2}
+epochs = Epochs(raw, events=events, event_id=event_id,
+                tmin=-0.5, tmax=4, baseline=None, preload=True,
+                verbose=False, picks=[0, 1, 2, 3, 4])
+print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)
+
+
+
197 events found on stim channel stim
+Event IDs: [1 2]
+sample drop %:  2.538071065989844
+
+
+
+
+

Decoding

+
# We can use a filter bank approach on the original 4 Muse electrodes (to see how the headband alone without external electrodes could be used to classify SSVEP):
+
+#    - Apply bandpass filters around both stimulation frequencies
+#    - Concatenate bandpass-filtered channels
+#    - Extract epochs (from 1 to 3 s after stimulus onset, to avoid classifying the ERP)
+#    - Apply common classification pipelines
+
+# Bandpass filter the raw data
+muse_raw = raw.drop_channels(['POz'])
+raw_filt_30Hz = muse_raw.copy().filter(25, 35, method='iir')
+raw_filt_20Hz = muse_raw.copy().filter(15, 25, method='iir')
+raw_filt_30Hz.rename_channels(lambda x: x + '_30Hz')
+raw_filt_20Hz.rename_channels(lambda x: x + '_20Hz')
+
+# Concatenate with the bandpass filtered channels
+raw_all = raw_filt_30Hz.add_channels([raw_filt_20Hz],
+                                            force_update_info=True)
+
+# Extract epochs
+events = find_events(raw_all)
+event_id = {'30 Hz': 1, '20 Hz': 2}
+
+epochs_all = Epochs(raw_all, events=events, event_id=event_id,
+                    tmin=1, tmax=3, baseline=None,
+                    reject={'eeg': 100e-6}, preload=True, verbose=False,)
+
+epochs_all.pick_types(eeg=True)
+X = epochs_all.get_data() * 1e6
+times = epochs.times
+y = epochs_all.events[:, -1]
+
+
+
Filtering raw data in 6 contiguous segments
+Setting up band-pass filter from 25 - 35 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 25.00, 35.00 Hz: -6.02, -6.02 dB
+
+Filtering raw data in 6 contiguous segments
+Setting up band-pass filter from 15 - 25 Hz
+
+IIR filter parameters
+---------------------
+Butterworth bandpass zero-phase (two-pass forward and reverse) non-causal filter:
+- Filter order 16 (effective, after forward-backward)
+- Cutoffs at 15.00, 25.00 Hz: -6.02, -6.02 dB
+
+197 events found on stim channel stim_30Hz
+Event IDs: [1 2]
+197 events found on stim channel stim_20Hz
+Event IDs: [1 2]
+NOTE: pick_types() is a legacy function. New code should use inst.pick(...).
+
+
+
+
+

Decoding

+
# Next, we will use 4 different machine learning pipelines to classify the SSVEP based on the data we collected. The
+
+# - CSP + RegLDA : Common Spatial Patterns + Regularized Linear Discriminat Analysis. This is a very common EEG analysis pipeline.
+# - Cov + TS : Covariance + Tangent space mapping. One of the most reliable Riemannian geometry-based pipelines.
+# - Cov + MDM: Covariance + MDM. A very simple, yet effective (for low channel count), Riemannian geometry classifier.
+# - CSP + Cov + TS: Common Spatial Patterns + Covariance + Tangent spacem mapping. Riemannian pipeline with the standard CSP procedure beforehand
+
+# Evaluation is done through cross-validation, with area-under-the-curve (AUC) as metric (AUC is probably the best metric for binary and unbalanced classification problem)
+
+# Note: because we're doing machine learning here, the following cell may take a while to complete
+
+clfs = OrderedDict()
+clfs['CSP + RegLDA'] = make_pipeline(Covariances(), CSP(4), LDA(shrinkage='auto', solver='eigen'))
+clfs['Cov + TS'] = make_pipeline(Covariances(), TangentSpace(), LogisticRegression())
+clfs['Cov + MDM'] = make_pipeline(Covariances(), MDM())
+clfs['CSP + Cov + TS'] = make_pipeline(Covariances(), CSP(4, log=False), TangentSpace(), LogisticRegression())
+
+# define cross validation
+cv = StratifiedShuffleSplit(n_splits=20, test_size=0.25,
+                                        random_state=42)
+
+# run cross validation for each pipeline
+auc = []
+methods = []
+for m in clfs:
+    print(m)
+    try:
+        res = cross_val_score(clfs[m], X, y==2, scoring='roc_auc',cv=cv, n_jobs=-1)
+        auc.extend(res)
+        methods.extend([m]*len(res))
+    except:
+        pass
+
+results = pd.DataFrame(data=auc, columns=['AUC'])
+results['Method'] = methods
+
+fig = plt.figure(figsize=[8,4])
+sns.barplot(data=results, x='AUC', y='Method')
+plt.xlim(0.4, 1)
+sns.despine()
+
+
+02r  ssvep decoding
CSP + RegLDA
+Cov + TS
+Cov + MDM
+CSP + Cov + TS
+
+
+

Total running time of the script: (0 minutes 1.827 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_ssvep/index.html b/develop/auto_examples/visual_ssvep/index.html new file mode 100644 index 00000000..927365be --- /dev/null +++ b/develop/auto_examples/visual_ssvep/index.html @@ -0,0 +1,515 @@ + + + + + + + <no title> — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/auto_examples/visual_ssvep/sg_execution_times.html b/develop/auto_examples/visual_ssvep/sg_execution_times.html new file mode 100644 index 00000000..70b88153 --- /dev/null +++ b/develop/auto_examples/visual_ssvep/sg_execution_times.html @@ -0,0 +1,528 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

00:14.538 total execution time for 3 files from auto_examples/visual_ssvep:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

SSVEP Visualization (01r__ssvep_viz.py)

00:12.710

0.0

SSVEP Decoding (02r__ssvep_decoding.py)

00:01.827

0.0

SSVEP run experiment (00x__ssvep_run_experiment.py)

00:00.000

0.0

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/changelog.html b/develop/changelog.html new file mode 100644 index 00000000..d6857932 --- /dev/null +++ b/develop/changelog.html @@ -0,0 +1,513 @@ + + + + + + + Code Changelog — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Code Changelog

+

This page contains the changelog for eeg-notebooks and any notes on updating between versions.

+
+

0.2.X

+

The 0.2.X series included several major updates in api, backend, and compatibility. +These updates were introduced around and for the OpenBCI-NTX Challenge 2020

+

Updates include:

+
    +
  • Support for OpenBCI EEG devices introduced through brainflow support

  • +
  • Abstracted ‘device’ class introduced to cover both OpenBCI with brainflow and Muse with muse-lsl

  • +
  • Subprocess calls for running concurrent psychopy and muselsl streams put inside functions (not required to be called by user)

  • +
  • New sphinx gallery-based documentation site, built with sphinx and hosted on gh-pages

  • +
  • General cleanup of documentation, installation, and setup instructions

  • +
  • Example datasets removed from repo and placed in separate cloud storage

  • +
  • Dataset downloader functions implemented

  • +
  • Kyle Mathewson’s visual cueing experiment + results added

  • +
+
+
+

0.1.X

+

The 0.1.X series was the initial port of the muse-lsl code, and development of the jupyter notebook-oriented approach. It was developed principally for the 2018/2019 NeuroBRITE and BrainModes programs.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/docs_notes.html b/develop/docs_notes.html new file mode 100644 index 00000000..82e1f062 --- /dev/null +++ b/develop/docs_notes.html @@ -0,0 +1,530 @@ + + + + + + + EEG-ExPy Documentation Developer Notes — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

EEG-ExPy Documentation Developer Notes

+

The documentation page can be found at https://neurotechx.github.io/eeg-expy

+

The documentation source files are a combination of plain-text .rst, .md, and .py files.

+

It is built locally with sphinx, and hosted on the github repo gh-pages branch in the usual fashion

+

There are two main locations for the documentation files:

+
    +
  • The various files and sub-folders in eeg-expy/doc, which contain the webpage contents

  • +
  • The files and sub-folders in eeg-expy/examples, which contains .py scripts, grouped by experiment

  • +
+

This general organization (with doc and examples folders) is widely used by excellent python libraries such as MNE and Nilearn, and we are largely following suit in the organization here.

+

The .py files in examples contain mixtures of python code and .rst-format documentation, which are converted through sphinx-gallery into a set of web-pages with formatted text and code and in-line figures. In addition, sphinx-gallery creates executable .py and .ipynb files for each example page, and adds download links to these at the bottom of each page.

+

The documentation building command actually executes the python code in each of the examples files, and creates figures from them. Errors in the python code lead to incompletely built webpages.

+

( Side-note: The title EEG-Notebooks was originally conceived with the idea of having complete Python-based EEG experiments runnable from a jupyter notebook, with those notebooks being the main contents of the repo. At the user-level, this is still largely the case; but at the development level, we have now switched over from maintaining a set of ipynb source files, to maintaining a set of .py files (in the examples folder), that are converted to .ipynbfiles when thesphinx-gallerydocumentation is compiled. This is much better and sustainable from the point of view of version control, since multiple-user contributions toipynb` files gets very hairy with git )

+
+

Building the doc site

+

The documentation build has only been tested in linux. It may also work on Mac.

+

First: install the docs dependencies in a new or existing python environment +(see requirements-doc.txt)

+

When working on the docs, it is most useful to have 3 terminals open, each with the python environment activated.

+

In terminal 1: edit the source files

+

In terminal 2: build and re-build the docs periodically to inspect changes

+

cd eeg-expy/doc +make html

+

In terminal 3: keep a local http server running to render the docs

+

python -m http.server 8001

+

In browser, navigate to the port used above

+

localhost:8001

+

When you are happy with the changes, commit and push the source files, and run the command that builds documentation and pushes to gh-pages

+

make install

+
+
+

Misc notes

+
    +
  • The doc/index.rst defines the overall site structure and table-of-contents tree

  • +
  • doc/Makefile contains the commands for building documentation. The main two commands are make html (build docs locally) and make install (build docs locally and push to gh-pages branch, updating the website`)

  • +
  • Examples pages can be built individually, rather than re-running the entire doc build process

  • +
  • The current doc build takes approximately 10 minutes

  • +
+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/all_examples.html b/develop/experiments/all_examples.html new file mode 100644 index 00000000..68935186 --- /dev/null +++ b/develop/experiments/all_examples.html @@ -0,0 +1,549 @@ + + + + + + + All Notebook Examples — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

All Notebook Examples

+

This section has examples of different functionality available in the module.

+

Examples are organized by topic, and can be explored in any order.

+
+

Cueing Single Subject Analysis

+
Cueing Single Subject Analysis
+
+

Cueing Group Analysis

+
Cueing Group Analysis
+
+

Cueing Behavioural Analysis Winter 2019

+
Cueing Behavioural Analysis Winter 2019
+
+

Cueing Group Analysis Winter 2019

+
Cueing Group Analysis Winter 2019
+
+

N170 run experiment

+
N170 run experiment
+
+

N170 Load and Visualize Data

+
N170 Load and Visualize Data
+
+

N170 Decoding

+
N170 Decoding
+
+

P300 run experiment

+
P300 run experiment
+
+

P300 Load and Visualize Data

+
P300 Load and Visualize Data
+
+

P300 Decoding

+
P300 Decoding
+
+

SSVEP run experiment

+
SSVEP run experiment
+
+

SSVEP Visualization

+
SSVEP Visualization
+
+

SSVEP Decoding

+
SSVEP Decoding
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/cueing.html b/develop/experiments/cueing.html new file mode 100644 index 00000000..315157ed --- /dev/null +++ b/develop/experiments/cueing.html @@ -0,0 +1,531 @@ + + + + + + + Visual Cueing — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+
+

Visual Cueing

+

The visual cueing task can ellicit a number of reliable changes. A central cue indicates the location of an upcoming target onset. Here the task can be changed to be perfectly predictive, or have some level of cue validity. The task is to indicate the orientation of a spatial grating on the target, up for vertical, right for horizontal.

+

The examples below demonstrate the following neural and cognitive empirical phenomena, elicited by the visual cueing task:

+

ERPs - Validly cued targets ellict larger ERPs than invalidly cued targets

+

Response ERPs - Validly cued targets are more quickly identified and better identified

+

Oscillations - Alpha power lateralizes after a spatial cue onset preceeding the upcoming onset of a target. Alpha power becomes smaller contraleral to the target side, and larger ipsilateral with the target.

+

Visual Cueing Experiment Notebook Examples

+
+
orphan:
+

+
+
+

Cueing Single Subject Analysis

+
Cueing Single Subject Analysis
+
+

Cueing Group Analysis

+
Cueing Group Analysis
+
+

Cueing Behavioural Analysis Winter 2019

+
Cueing Behavioural Analysis Winter 2019
+
+

Cueing Group Analysis Winter 2019

+
Cueing Group Analysis Winter 2019
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/gonogo.html b/develop/experiments/gonogo.html new file mode 100644 index 00000000..e3043c29 --- /dev/null +++ b/develop/experiments/gonogo.html @@ -0,0 +1,509 @@ + + + + + + + Visual Go-No-Go — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+
+

Visual Go-No-Go

+

An experiment designed to investigate the event-related potentials that can be detected during a Go-No-Go Task, which measures executive, inhibitory control and sustained attention. The subject is rapidly presented with a sequence of circles and squares and is asked to indicate, by pressing the spacebar, whether a shape is a circle.

+
+
orphan:
+

+
+
+

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/vn170.html b/develop/experiments/vn170.html new file mode 100644 index 00000000..8ec5d3c8 --- /dev/null +++ b/develop/experiments/vn170.html @@ -0,0 +1,525 @@ + + + + + + + Visual N170 — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+
+

Visual N170

+

The N170 is a large negative event-related potential (ERP) component that occurs after the detection of faces, but not objects, scrambled faces, or other body parts such as hands. The N170 occurs around 170ms after face perception and is most easily detected at lateral posterior electrodes such as T5 and T6 1. Frontal or profile views of human (and animal 2) faces elicit the strongest N170 and the strength of the N170 does not seem to be influenced by how familiar a face is. Thus, although there is no consensus on the specific source of the N170, researchers believe it is related to activity in the fusiform face area, an area of the brain that shows a similar response pattern and is involved in encoding the holistic representation of a face (i.e eyes, nose mouth all arranged in the appropriate way).

+

In this notebook, we will attempt to detect the N170 with the Muse headband using faces and houses as our stimuli. The Muse’s temporal electrodes (TP9 and TP10) are well positioned to detect the N170 and we expect we’ll be able to see an N170 emerge from just a few dozen trials. We will then run several different classification algorithms on our data in order to evaluate the performance of a potential brain-computer interface using the N170.

+

Visual N170 Experiment Notebook Examples:

+
+
orphan:
+

+
+
+

N170 run experiment

+
N170 run experiment
+
+

N170 Load and Visualize Data

+
N170 Load and Visualize Data
+
+

N170 Decoding

+
N170 Decoding
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/vp300.html b/develop/experiments/vp300.html new file mode 100644 index 00000000..2bc021c1 --- /dev/null +++ b/develop/experiments/vp300.html @@ -0,0 +1,525 @@ + + + + + + + Visual P300 — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+
+

Visual P300

+

The P300 is a positive event-related potential (ERP) that occurs around 300ms after perceiving a novel or unexpected stimulus. It is most commonly elicited through ‘oddball’ experimental paradigms, where a certain subtype of stimulus is presented rarely amidst a background of another more common type of stimulus. Interestingly, the P300 is able to be elicited by multiple sensory modalities (e.g. visual, odditory, somatosensory). Thus, it is believed that the P300 may be a signature of higher level cognitive processing such as conscious attention.

+

In this notebook, we will attempt to elicit a P300 with a visual oddball stimulation paradigm using the Muse headband

+

Visual P300 Notebook Examples

+
+
orphan:
+

+
+
+

P300 run experiment

+
P300 run experiment
+
+

P300 Load and Visualize Data

+
P300 Load and Visualize Data
+
+

P300 Decoding

+
P300 Decoding
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/experiments/vssvep.html b/develop/experiments/vssvep.html new file mode 100644 index 00000000..deb76f79 --- /dev/null +++ b/develop/experiments/vssvep.html @@ -0,0 +1,532 @@ + + + + + + + Visual SSVEP — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+
+

Visual SSVEP

+

The steady-state visual evoked potential (SSVEP) is a repetitive evoked potential that is naturally produced when viewing stimuli flashing between a range of 6-75hz. Electrical activity at the same frequency as the visual stimulation can be detected in the occipital areas of the brain, likely due to the perceptual recreation of the stimulus in the primary visual cortex.

+

The SSVEP is often used in BCI applications due to its ease of detection and the amount of information that a user can communicate due to the high potential frequency resolution of the SSVEP.

+

In this notebook, we will use the Muse EEG headband with an extra occipital electrode to detect the SSVEP and evaluate it’s use in SSVEP-based BCIs.

+

Extra Electrode

+

Although the SSVEP is detectable at the default temporal electrodes, it can be seen much more clearly directly over the occipital cortex.

+

The Muse 2016 supports the addition of an extra electrode which can be connected through the devices microUSB charging port.

+

Instructions on how to build an extra electrode for Muse +Working with the extra electrode +For this experiment, the extra electrode should be placed at POz, right at the back of the skull. It can be secured in place with a bandana or a hat

+

SSVEP Experiment Notebook Examples

+
+
orphan:
+

+
+
+

SSVEP run experiment

+
SSVEP run experiment
+
+

SSVEP Visualization

+
SSVEP Visualization
+
+

SSVEP Decoding

+
SSVEP Decoding
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/genindex.html b/develop/genindex.html new file mode 100644 index 00000000..d1789bb9 --- /dev/null +++ b/develop/genindex.html @@ -0,0 +1,495 @@ + + + + + + Index — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ +
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/analysis.html b/develop/getting_started/analysis.html new file mode 100644 index 00000000..abe1c21c --- /dev/null +++ b/develop/getting_started/analysis.html @@ -0,0 +1,498 @@ + + + + + + + Analyzing data — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Analyzing data

+

( To add )

+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/available_experiments.html b/develop/getting_started/available_experiments.html new file mode 100644 index 00000000..a69177c6 --- /dev/null +++ b/develop/getting_started/available_experiments.html @@ -0,0 +1,544 @@ + + + + + + + Available Experiments — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Available Experiments

+
+

Visual P300 with Oddball paradigm

+

The visual P300 is a spike that occurs 300ms after perceiving a visual stimulus that has implications on decision making. This was validated in Muse by Alexandre Barachant with the Oddball paradigm, in which low-probability target items (oddballs) are interspersed with high probability non-target items. With AB’s paradigm, the experiment takes about 10 minutes to run (5 x 2 minute trials). Although the Muse’s sensors aren’t in the ideal position for detecting the P300, AB was able to attain “good” accuracy in identifying P300 spikes.

+
+
+

N170

+

The N170 is an ERP specifically related to the perception of faces. This was validated in Muse by Hubert with a 12 minute experiment (6 x 2 minute trials). Stimuli consists of 12 pictures of houses and 12 pictures of faces. Accuracy of N170 detection is rather good.

+
+
+

SSVEP

+

The steady state visual evoked potential is a frequency response produced visual stimulation at specific frequencies. It was validated by Hubert in a 12 minute experiment (6 x 2 minute trials). Stimulation frequencies of 30hz and 20hz were used and an extra electrode at POz was added. Found clear peaks in the PSD at the stimulation frequencies. The peaks were most significant at the extra electrode, which is closest to the primary visual regions, but was detectable at all electrodes and found to have remarkably high accuracy when using a filter bank approach to isolate specific frequencies.

+
+
+

Old experiments

+
+

Go/No-Go

+

An experiment designed to investigate the event-related potentials that can be detected during a Go-No-Go Task, which measures executive, inhibitory control and sustained attention. The subject is rapidly presented with a sequence of circles and squares and is asked to indicate, by pressing the spacebar, whether a shape is a circle.

+
+
+

SSAEP

+

The steady state auditory evoked potential is a frequency response produced when hearing modulating tones of certain frequencies. It was validated in Muse by Hubert, who used 45hz and 40hz amplitude modulation applied to 900 and 770h carrier frequencies. A PSD of the produced EEG signal showed clear spikes, correspondingly, at 45 and 40hz in the temporal electrodes. The N100 and P200 complex was also noticed at the beginning of stimulus onset.

+
+
+

C1 and P1

+

C1 and P1 are two ERPs related to the perception of a visual stimulus. The C1 is the first component, appearing in the 65-90ms range after stimulus onset while the P1 appears later, around 100ms.

+

C1 and P1 were validated in Muse by Hubert with a left/right visual field experiment. Comparing ERPs to left or right-field presentation of visual stimuli revealed a contralateral pattern of C1 and P1 in both the temporal and anterior electrodes. However, their timing seems a little delayed.

+
+
+

Auditory P300

+

Same as the visual P300, but dependent on auditory stimulus. Auditory P300s are normally less distinguishable than visual P300s, but they may be more suited to the Muse since its electrodes are closer to auditory centers (superior temporal cortex).

+
+
+
+

Unvalidated Experiments and other phenomena

+
+

N100 - P200

+

The combination of a negative evoked potential around 100ms after any unpredictable stimulus and a positive potential 200ms after. These were noticed in Hubert’s SSAEP experiment, but not independently classified or tested.

+
+
+

On-task Beta

+

Noticed in Hubert’s visual grating test, but difficult to extract.

+
+
+

Alpha reset

+

A noticeable increase in alpha activity after stimulus presentation ends. Noticed in Hubert’s visual grating test.

+
+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/data_zipper.html b/develop/getting_started/data_zipper.html new file mode 100644 index 00000000..3aa04e20 --- /dev/null +++ b/develop/getting_started/data_zipper.html @@ -0,0 +1,539 @@ + + + + + + + Data Zipping — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Data Zipping

+

After you have ran experiments, you can compress all recorded data as a zip folder. The method for doing this is using the command line tool with the flags detailed below.

+
+

Command Line Interface

+

To activate the command line tool, open a command/terminal prompt and enter eegnb runzip followed by the appropriate flag for your desired experiment. Command line tool looks through folders in ~/.eegnb/data for compression. Zip files will be outputted in the format of ~/Desktop with the following filename {experiment_name}_{site}-{day_month_year_hour:minute}_zipped +The possible flags are

+
    +
  • -ex ; –experiment: The experiment to be run

  • +
  • -s ; –site: Subfolder within the experiment

  • +
  • -ip ; –prompt: Bypass the other flags to activate an interactive prompt

  • +
+
+
+

Using the introprompt flag

+

If using the -ip flag the user will be prompted to input the various session parameters. The prompts are detailed below.

+

Experiment Selection

+
Please select which experiment you would like to run: 
+[0] Visual N170
+[1] Visual P300
+[2] Visual SSVEP
+[3] visual-cue (no description)
+[4] visual-codeprose (no description)
+[5] Auditory SSAEP (orig)
+[6] Auditory SSAEP (single freq)
+[7] Auditory oddball (orig)
+[8] Auditory oddball (diaconescu)
+
+Enter Experiment Selection:
+
+
+

This section allows you to select one of the above experiments to run. There are other experiments available, however, they have not yet been updated for the new API to be device agnostic. As they get updated, more experiments will populate this section.

+

Site Selection

+
Please select which experiment subfolder you would like to zip. Default 'local_ntcs'
+
+Current subfolders for experiment visual-N170:
+
+['local','local_ntcs','temp']
+
+Enter folder:
+
+
+

This selection allows you to select the subfolder for the experiment you have previously chosen. The example provided was for sample sites in the visual-N170 folder.

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/faq.html b/develop/getting_started/faq.html new file mode 100644 index 00000000..f6d1a43c --- /dev/null +++ b/develop/getting_started/faq.html @@ -0,0 +1,512 @@ + + + + + + + Frequently Asked Questions — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Frequently Asked Questions

+
+

How do I run an experiment?

+

eegnb runexp -ip

+
+
+

How do I visualize a live stream to check the recording quality

+

eegnb view

+
+
+

I forgot the name of my conda env?

+

You can check your conda environments with conda env list

+
+
+

Where is my data?

+

By default, all recorded data is saved to, e.g.,

+

~/.eegnb/data/visual-N170/local/museS/subject0001/session0001/recording_2020-09-19-03.37.42.csv

+
+
+

How do I find the correct MAC port for my OpenBCI?

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/installation.html b/develop/getting_started/installation.html new file mode 100644 index 00000000..9a8ab7b1 --- /dev/null +++ b/develop/getting_started/installation.html @@ -0,0 +1,639 @@ + + + + + + + Installation — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Installation

+

EEG-Notebooks is a Python library. It runs on all major operating systems (Windows, Linux, Mac).

+

If you do not have Python installed, or are not particularly familiar with using it, then we highly recommend downloading and installing the Miniconda(3) Python(3) distribution. For users who prefer to use VirtualEnv (venv) than conda for Python environment management, we provide equivalent installation and usage instructions where relevant.

+

The principal purpose of EEG-Notebooks is to allow users to run and create cognitive neuroscience experiments using consumer-grade EEG systems. A secondary, complementary, purpose of the tool is to provide various functionalities for the organization, analysis and visualization of the data collected in these experiments.

+

As such, there are two principal modes of usage:

+
    +
  1. Making new EEG recordings, and analyzing the newly recorded data

  2. +
  3. Not making EEG recordings, and analyzing existing (either previously recorded or public-domain) data

  4. +
+

As may be expected, the installation and setup steps for mode 2 are simpler than mode 1, with the difference being additional hardware and software requirements for measuring, streaming and recording EEG data. These requirements, which are device- and operating system-specific, are as follows:

+
    +
  • Muse 2016, Muse 2, and Muse S recordings on Windows require the third-party streaming tool BlueMuse. Bluemuse deals with establishing a connection between the eeg device and the stimulus-delivery laptop/desktop.

  • +
  • Muse 2016, Muse 2, and Muse S recordings on Mac require a BLED112 dongle (see below). The BLED112 dongle bypasses the native bluetooth hardware, which is not compatible with muse device streaming.

  • +
  • OpenBCI recordings work the same for all operating systems, and do not require any additional hardware or software.

  • +
  • ‘Usage mode 2’ above (no EEG recordings) can be done easily on any operating system without any extra hardware or software, as well as on free temporary cloud compute servers through Binder and GoogleColab, which we provide instructions for.

  • +
+
+

Installing the Python library

+

We highly recommend making use of a virtual environment (either conda or virtualenv) when installing eeg-notebooks.

+

If you don’t already have a Python installation, grab the latest miniconda version for your operating system from here (https://docs.conda.io/en/latest/miniconda.html).

+

Use the following commands to download the repo, create and activate a conda or virtualenv virtual environment:

+
+
conda create -n "eeg-notebooks" python=3.7 git pip wxpython
+
+conda activate "eeg-notebooks"
+
+git clone https://github.com/NeuroTechX/eeg-notebooks
+
+cd eeg-notebooks
+
+pip install -e .
+
+
+
+

Add the new environment to the jupyter kernel list

+

For some operating systems, the following command is necessary in order to make the new eeg-notebooks environment available from the jupyter notebook landing page

+
python -m ipykernel install --user --name eeg-notebooks
+
+
+

Test installation

+

Start a jupyter notebooks session and you will be presented with the eeg-notebooks file structure. You can test the installation by opening a new jupyter notebook and running a cell containing the code below. This will run one session of the Visual N170 with your board of choice.

+
# Imports
+import os
+from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments.visual_n170 import n170
+from eegnb.analysis.utils import load_data
+
+# Define some variables
+board_name = 'muse'
+# board_name = 'cyton'
+experiment = 'visual_n170'
+session = 999
+subject = 999 # a 'very British number'
+record_duration=120
+
+# Initiate EEG device
+eeg_device = EEG(device=board_name)
+
+# Create output filename
+save_fn = generate_save_fn(board_name, experiment, subject)
+
+# Run experiment
+n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+
+# Load recorded data
+raw = load_data(subject, session, board_name, experiment)
+
+
+
+
+

MUSE Requirements

+

The InteraXon MUSE streams EEG over bluetooth. There are additional hardware and software requirements for making recordings with MUSE devices, which are different across operating systems.

+
+

MUSE recordings on windows: BlueMuse

+

BlueMuse is a Windows 10 program that allows communication between a Muse headband and a computer’s native bluetooth drivers using the LSL communication protocol. To install, go the the BlueMuse github repo and follow the installation instructions.

+
+
+

MUSE recordings on Mac: BLED112 Dongle

+

Unfortunately, the native bluetooth driver on Mac cannot be used with eeg-notebooks. To run on this operating system, it is necessary to purchase a BLED112 USB Dongle. Note: this is a ‘special’ bluetooth dongle; standard bluetooth dongles will not work.

+
+
+

MUSE recordings on Linux

+

Streaming MUSE data on Linux works without a dongle (which relies on pygatt’s GATT backend), but might be more stable with the BLED112 USB Dongle and BGAPI backend.

+
+
+
+

Issues

+
+

Common Problems

+

Problems with Conda and Jupyter Notebook: +If you have created the conda env but it is not appearing as a kernel option in the jupyter notebook, you may need to manually add the new conda env to the jupyter envs list

+
$ conda activate eeg-notebooks
+$ pip install ipykernel
+$ python -m ipykernel install --user --name eeg-notebooks
+
+
+

In windows, if the above is causing errors, the following commands may help:

+
$ conda install pywin32
+$ conda install jupyter
+$ conda install nb_conda
+$ conda install ipykernel
+
+
+
+
+

Bug reports

+

Please use the Github issue tracker +to file bug reports and/or ask questions about this project. When filing a bug report, please include the following information: +* Operating System. +* Device being used. +* Any error messages generated.

+
+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/loading_and_saving.html b/develop/getting_started/loading_and_saving.html new file mode 100644 index 00000000..2a5ccf89 --- /dev/null +++ b/develop/getting_started/loading_and_saving.html @@ -0,0 +1,572 @@ + + + + + + + Loading and Saving Data — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Loading and Saving Data

+

Knowing where the data is saved is integral to the functionality of EEG Notebooks. EEG Notebooks saves data to a default location in a hidden directory. From this directory, the individual files can be found based on a folder structure outlined below in the naming convention.

+
+

Locating the Default Data Directory

+
+

Windows 10

+

The default directory is found at the location C:\Users\*USER_NAME*\.eegnb an example of which is pictured below. +fig

+
+
+

Linux

+
+
+

MacOS

+
+
+
+

Changing the Default Data Directory

+

The default directory for saving data is automatically set within the library. If you want to save and analyze data to/from a new directory, it must be passed as a parameter to both the eegnb.generate_save_fn() and eegnb.analysis.load_data() functions.

+

Saving to new directory:

+
from eegnb import generate_save_fn
+from eegnb.experiments.visual_n170 import n170
+
+# Define session parameters
+board = 'cyton'
+experiment = 'visual-N170'
+subject = 1
+session = 1
+
+# Define new directory and generate save filename
+new_dir = 'C:/Users/Jadin/Documents/EEG_Notebooks_Data'
+save_fn = generate_save_fn(board, experiment, subject, session, new_dir)
+
+# Continue to run experiment as normal...
+
+
+

Loading from new directory:

+
from eegnb.analysis.utils import load_data
+
+# Define parameters for session you want to load
+board = 'cyton'
+experiment = 'visual-N170'
+subject = 1
+session = 1
+
+# Define new directory
+new_dir = 'C:/Users/Jadin/Documents/EEG_Notebooks_Data'
+
+# Load data
+raw = load_data(
+            subject_id = subject,
+            session_nb = session,
+            device_name = board,
+            experiment = experiment,
+            data_dir = new_dir
+        ) 
+
+
+
+
+

Naming Convention

+

From the specified data directory, EEG notebooks then follows a specific set of naming conventions to define subdirectories and save the data. The full path ends up taking the form

+
DATA_DIR\experiment\site\device\subject#\session#\file_name.csv
+
+
+

Each field is explained below:

+

Experiment: This part is the name of the experiment being run. Example names of experiments as they appear in the example datasets are shown below.

+
visual-N170
+visual-P300
+visual-SSVEP
+
+
+

Site: The site refers to the recording location, or generally the machine it was recorded to. If you are saving and analyzing only your own data on your local machine, you do not need to specify your site name as it will default to ‘local’. When loading example datasets however, it is necessary to specify from which site you would like to load data.

+

Device: The name of the device being recorded from.

+

Subject #: When entering subject ID as a parameter, you only need to specify the integer value. The integer will be formatted to subjectXXXX where “XXXX” is a four-digit representation of the integer ID#.

+

Session #: A session in this case would be the full period of time which you have the device on and are taking multiple recordings. For example: if you put the headset on and take five recordings, all five of these recording would belong to session number 1. Once you take a break from consecutive recordings, then this would constitute a new session. Just like the subject ID, this value is passed as an integer and gets converted to a read-able format.

+

File name: The file name is automatically generated in the format recording_date_time.csv

+
+

Examples

+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/running_experiments.html b/develop/getting_started/running_experiments.html new file mode 100644 index 00000000..f9671954 --- /dev/null +++ b/develop/getting_started/running_experiments.html @@ -0,0 +1,690 @@ + + + + + + + Running Experiments — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Running Experiments

+

After you have installed the library there are two methods for collecting data. The first is using the command line tool with the flags detailed below. This is the easiest and recommended for users without much programming experience. The other method involves calling the experiment presentations from within a jupyter notebook (or custom python script).

+
+

Command Line Interface

+

To activate the command line tool, open a command/terminal prompt and enter eegnb runexp followed by the appropriate flags for your device, desired experiment, and more. The possible flags are

+
    +
  • -ed ; –eegdevice: The device being used to record data. Each device has a specific string to be passed which can be seen on the Initiating an EEG Stream under the EEG.device parameter for the respective device.

  • +
  • -ex ; –experiment: The experiment to be run

  • +
  • -ma ; –macaddr: The MAC address of device to use (applicable devices e.g ganglion)

  • +
  • -rd ; –recdur: Duration of recording (in seconds).

  • +
  • -of ; –outfname: Save file name (this will be automatically generated to match default file system if left blank).

  • +
  • -ip ; –prompt: Bypass the other flags to activate an interactive prompt.

  • +
+
+

Using the introprompt flag

+

If using the -ip flag the user will be prompted to input the various session parameters. The prompts are detailed below.

+
eegnb runexp -ip
+
+
+
+

Board Selection

+
Welcome to NeurotechX EEG Notebooks. 
+Please enter the integer value corresponding to your EEG device: 
+[0] None 
+[1] Muse2016 
+[2] Muse2 
+[3] OpenBCI Ganglion 
+[4] OpenBCI Cyton 
+[5] OpenBCI Cyton + Daisy 
+[6] G.Tec Unicorn 
+[7] BrainBit 
+[8] Notion 1 
+[9] Notion 2 
+[10] Synthetic 
+
+Enter Board Selection:
+
+
+

Here you specify which of the supported boards you are using to collect data. EEG Notebooks supports a number of different consumer boards with various backends. This step ensures that the proper backend and device parameters are used. If you are using an OpenBCI board there will be an additional prompt asking for the desired connection method. Right now it is recommended to use the USB dongle over the wifi shield.

+
+
+

Experiment Selection

+
Please select which experiment you would like to run: 
+[0] Visual N170
+[1] Visual P300
+[2] Visual SSVEP
+[3] visual-cue (no description)
+[4] visual-codeprose (no description)
+[5] Auditory SSAEP (orig)
+[6] Auditory SSAEP (single freq)
+[7] Auditory oddball (orig)
+[8] Auditory oddball (diaconescu)
+
+Enter Experiment Selection:
+
+
+

This section allows you to select one of the above experiments to run. There are other experiments available, however, they have not yet been updated for the new API to be device agnostic. As they get updated, more experiments will populate this section.

+
+
+

Recording Duration

+
Now, enter the duration of the recording (in seconds). 
+
+Enter duration:
+
+
+

This is the duration of each recording. It is standard to use 120-second (2 minute) recordings, but some people might experience visual fatigue and difficulty not blinking for as long, so you are welcome to adjust the length as needed.

+
+
+

Subject ID

+
Next, enter the ID# of the subject you are recording data from. 
+
+Enter subject ID#:
+
+
+
+
+

Session Number

+
Next, enter the session number you are recording for. 
+
+Enter session #:
+
+
+

The session number corresponds to each time you sit down to take multiple recordings. If you put your device on and run this script 5 consecutive times you would use the same session number every time. However, if you were to take a break then return for an additional 3 recordings, the last 3 would have a new session number. For more information about how this corresponds to saving data please see the documentation page on loading and saving data.

+

If you are using OpenBCI on Windows/MacOS you will be given an additional prompt to enter the name of the serial port the USB dongle is using. For instructions on how to use the OpenBCI GUI to find the serial port see Initiating an EEG Stream.

+
+
+
+
+

Using Jupyter Notebooks or a custom script

+

The first step is to import all of the necessary library dependencies. These are necessary for generating a save file name which conforms to the default folder structure, streaming and recording EEG data, and running the stimulus presentation.

+
from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments.visual_n170 import n170
+
+
+

Next we need to define session parameters which are otherwise handled via input prompts in the run_notebooks.py script. After we define the session parameters we will pass them to the file name generator.

+
board_name = 'cyton'
+experiment = 'visual_n170'
+session = 1
+subject = 1
+record_duration = 120
+
+# Create output filename
+save_fn = generate_save_fn(board_name, experiment, subject, session)
+
+
+

Next it is necessary to call the eegnb.devices.eeg.EEG class which handles all of the backend processes related to each device.

+
eeg_device = EEG(device=board_name)
+
+
+

Finally, we call the present method of the class corresponding to our desired experiment, in this case the visual N170. We pass both the EEG device and generated save file name in order to collect and save data. The presentation can also be run without an EEG device/save file for testing and debugging.

+
n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+
+
+

All together the example script looks like

+
# Imports
+from eegnb import generate_save_fn
+from eegnb.devices.eeg import EEG
+from eegnb.experiments.visual_n170 import n170
+
+# Define some variables
+board_name = 'cyton'
+experiment = 'visual_n170'
+session = 1
+subject = 1
+record_duration = 120
+
+# Create output filename
+save_fn = generate_save_fn(board_name, experiment, subject, session)
+
+# Setup EEG device
+eeg_device = EEG(device=board_name)
+
+# Run stimulus presentation
+n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+
+
+
+
+

Using virtual reality

+
+

Heads up display

+

A heads-up display can be used for presenting experiments in a similar way to a monitor, without much modification.

+
+

Features to be added in future releases:

+
    +
  • Controller input

  • +
  • Controller haptic feedback

  • +
+
+
+
+

Prerequisites:

+
    +
  • Oculus Rift compatible VR headset, e.g. Oculus Rift or Meta Quest series.

  • +
  • Native Windows installation with meta link compatible video card.

  • +
  • EEG device, e.g. OpenBCI Cyton or Muse

  • +
+

If an experiment has the use_vr argument in its present method, it can have its stimulus presented to a subject’s VR headset. +The N170 experiment for example, can have its stimulus displayed on the VR headset with a simple modification to the ‘use_vr’ argument, when presenting an experiment:

+
# Run stimulus presentation with VR enabled.
+n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=True)
+
+
+
+
+

+
+
+

Steps for running experiment in VR

+
    +
  1. Launch the Oculus app on the Windows computer and the IDE or CLI to be used for running the experiment.

  2. +
  3. Turn on the VR headset and put it on your head to make sure it is on and active, then take it off.

  4. +
  5. Go to the ‘Devices’ view in the Oculus app, it will show the headset as connected and active, along with any inactive or connected controllers.

  6. +
  7. Go to the ‘Settings’ view, under the ‘Beta’ title, enable ‘Pass through over Oculus Link’, double tapping the headset later with a fingertip will activate passthrough.

  8. +
  9. Put the VR headset onto the head, activate passthrough to help with wearing the eeg device.

  10. +
  11. Place the EEG device on top of the head.

  12. +
  13. Ensure the electrodes are touching the scalp ok and not being blocked by the headset strap.

  14. +
  15. From inside the VR headset’s ‘quick settings’ dashboard, select ‘Quest Link’ and connect to the Oculus server running on windows, via air link or link cable.

  16. +
  17. Once the Oculus menu has finished loading on the VR headset, open the built-in Oculus desktop app by using the touch controllers or gamepad.

  18. +
  19. Try opening an EEG device raw data viewer and verify that the electrodes are receiving a good signal without too much noise, e.g. ‘OpenBCI GUI’.

  20. +
  21. Run the EEG-ExPy experiment from the command line or IDE, it should load and take control from the Oculus desktop app.

  22. +
  23. Follow the experiment instructions, and press a key if necessary to begin the experiment and collect valid data.

  24. +
+
+
+

Other experiments can have VR added too.

+
    +
  1. Load/prepare stimulus in the same function as previously (def load_stimulus(self))

  2. +
  3. Present stimulus in the same function as previously(def present_stimulus(self, current_trial: int))

  4. +
  5. VR can be enabled for the experiment as part of the initializer to the base Experiment class, by default it is not enabled(use_vr=False) and will function the same as previously before VR functionality was added.

  6. +
+
+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/getting_started/streaming.html b/develop/getting_started/streaming.html new file mode 100644 index 00000000..9d5ff67c --- /dev/null +++ b/develop/getting_started/streaming.html @@ -0,0 +1,616 @@ + + + + + + + Initiating an EEG Stream — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Initiating an EEG Stream

+

Before getting going with running an experiment, it is important to first verify that a connection between your computer and EEG device has been successfully established, and the raw EEG data is being streamed and recorded properly.

+

The exact steps for this varies with the device (MUSE, OpenBCI, others) and operating system (Windows, Mac, Linux) used. When using these instructions, you should make sure you are consulting the section appropriate for your combination of device and OS.

+

Initiating an EEG stream is a relatively easy process using the eegnb.devices.eeg.EEG class which abstracts the +the various devices and backends behind one easy call.

+
from eegnb.devices.eeg import EEG
+
+# define the name for the board you are using and call the EEG object
+eeg = EEG(device='cyton')
+
+# start the stream
+eeg.start()
+
+
+

These two lines of code abstract a lot of the heavy lifting with respect to switching streaming backends for the various support devices.

+
+

Supported Devices

+

Below is a list of supported devices and the information needed to connect to each when running the library. Each section also provides common troubleshooting tips for each. If you encounter any errors when connecting which are not listed below please report these on the issues page.

+
+

Interaxon Muse

+

Device Names: ‘muse2016’, ‘muse2’, and ‘museS’ +Backend: MuseLSL +Needed Parameters: No other parameters are needed, however, if running on Windows 10, then you must also start blue muse before running the experiments.

+
+

Using the Muse on Windows

+

To initialize the EEG stream on Windows you must have Bluemuse running in the background. Open a terminal and start +bluemuse using start bluemuse; which should open up a GUI. If you have the USB dongle plugged in and the muse turned on +then you should see a GUI which looks something like the image below.

+

fig

+

Once you press the Start Streaming button, muse will be streaming data in the background, and the above code can +be run to begin the notebooks interfacing with the bluemuse backend.

+
+
+
+

OpenBCI Ganglion

+

fig

+

Device Name: ‘ganglion’ or ‘ganglion_wifi’ with WiFi Shield +Backend: Brainflow +Needed Parameters:

+
    +
  • mac_addr: MAC Address (see below for instructions on getting the MAC address)

  • +
+

Optional Parameters:

+
    +
  • serial_port: Serial port containing the USB dongle. If it does not automatically discover the USB port see the instructions below for finding the serial port in the OpenBCI GUI.

  • +
+

Finding the Ganglion’s MAC address

+

(Information needed)

+
+
+

OpenBCI Cyton

+

fig

+

Device Name: ‘cyton’ or ‘cyton_wifi’ with WiFi Shield +Backend: Brainflow +Needed Parameters: +Optional Parameters:

+
+
+

OpenBCI Cyton + Daisy

+

fig +Device Name: ‘cyton_daisy’ or ‘cyton_daisy_wifi’ with WiFi Shield +Backend: Brainflow +Needed Parameters: +Optional Parameters:

+
+
+

Neurosity Notion (versions 1 and 2)

+

fig +Device Name: ‘notion1’ or ‘notion2’ +Backend: Brainflow +Needed Parameters: No additional parameters are needed to connect to the Notion. It is necessary however to make sure the Notion is on the same network and readable by Neurosity’s developer console.

+
+

Connecting on Windows

+

In order to connect to the Notion on Windows you must first turn off your network firewall for the Open Sound Control (OSC) protocol to function for the notion.

+
+
+
+

BrainBit EEG Headband

+

fig +Device Name: ‘brainbit’ +Backend: Brainflow

+
+
+

G.Tec Unicorn

+

fig +Device Name: ‘unicorn’ +Backend: Brainflow

+
+
+
+

Initiating a Muse stream in Windows using Bluemuse

+

To initialize the EEG stream on Windows you must have Bluemuse running in the background. Open a terminal and start +bluemuse using start bluemuse; which should open up a GUI. If you have the USB dongle plugged in and the muse turned on +then you should see a GUI which looks something like the image below.

+

fig

+

Once you press the Start Streaming button, muse will be streaming data in the background, and the above code can +be run to begin the notebooks interfacing with the bluemuse backend.

+
+
+

Finding the USB port of the OpenBCI USB dongle

+

If the library is not connecting to an OpenBCI device this might be an issue of defaulting to the wrong serial +port. If this is happening you can check the serial port of the dongle by opening the OpenBCI GUI and navigating to the +menu pictures below.

+

fig

+

Now that we have the COM port, we can initiate the stream by passing it to the EEG device in the object call.

+
from eegnb.devices.eeg import EEG
+
+# define the name for the board you are using and call the EEG object
+eeg = EEG(
+    device='cyton',
+    serial_port='COM7'
+)
+
+# start the stream
+eeg.start()
+
+
+

This issue is more common on windows installations, and the image above is shown on a windows OS. However it might still +be possible for it to happen in Linux and in any case, the process for determining the USB port of the dongle is the same.

+
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/index.html b/develop/index.html new file mode 100644 index 00000000..2738f59c --- /dev/null +++ b/develop/index.html @@ -0,0 +1,689 @@ + + + + + + + EEG-ExPy — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

EEG-ExPy

+

Democratizing the cognitive neuroscience experiment

+

badge_test badge_binder

+doc/img/EEG-ExPy_Logo.png +

EEG-ExPy is a collection of classic EEG experiments, implemented in Python. The experimental protocols and analyses are quite generic, but are primarily tailored for low-budget / consumer EEG hardware such as the InteraXon MUSE and OpenBCI Cyton. The goal is to make cognitive neuroscience and neurotechnology more accessible, affordable, and scalable.

+ +
+

Note: eeg-expy was previously known as eeg-notebooks. Before the renaming, eeg-notebooks also underwent major changes to the API in v0.2. The old v0.2 version, before the name change, is still available if you need it, in this repo. The even older v0.1 is also still available if needed here.

+
+
+

Overview

+

Conventional lab-based EEG research typically uses research-grade (often high-density) EEG devices, dedicated stimulus delivery software and hardware, and dedicated technicians responsible for operating this equipment. The price tag for these items can easily extend into hundreds of thousands of dollars, which naturally places major limits on their acquisition and usage.

+

In recent years, however, developments in hardware and software technologies are making it possible for many classic EEG cognitive neuroscience experiments to be conducted using a standard laptop/personal computer and a relatively cheap consumer-grade EEG device, with a combined minimum cost of less than 1000 dollars. This opens dramatic new possibilities for neurotechnology and cognitive neuroscience education (at both University and High School levels), as well as more ambitious and larger-scale research and clinical applications using large numbers of devices, and/or in more naturalistic (i.e. out-of-the-lab) settings. We like to think of this as contributing to the democratization of the cognitive neuroscience experiment.

+

The core aim of the EEG-Notebooks project is to provide the critical ‘glue’ that pulls together the various enabling technologies necessary for running these experiments and analyzing the data. This includes functionality for

+
    +
  • streaming data from various relatively new wireless consumer-grade EEG devices

  • +
  • visual and auditory stimulus presentation, concurrent with and time-locked to the EEG recordings

  • +
  • a growing library of well-documented, ready-to-use, and ready-to-modify experiments

  • +
  • signal processing, statistical, and machine learning data analysis functionalities

  • +
+

A real one-stop-shop!

+

For more discussion on these social/scientific/technological contexts and trajectories, a) feel free to get in touch directly (see #Contact info below) and b) keep an eye out for the forthcoming eeg-expy research paper.

+
+
+

Documentation

+

The current version of eeg-expy is the 0.2.X series. The code-base and API are under major development and subject to change.

+

Check the changelog for notes on changes from previous versions.

+

Installation instructions, steps for getting started, common troubleshooting solutions and more can be found in the documentation for eeg-expy, available on the +documentation site.

+
+
+

Acknowledgments

+

EEG-Notebooks was created by the NeurotechX hacker/developer/neuroscience community. The initial idea and majority of the groundwork was due to Alexandre Barachant - including the muse-lsl library, which is a core dependency. Lead developer on the project is now John Griffiths.

+

Key contributors include: Alexandre Barachant, Hubert Banville, Dano Morrison, Ben Shapiro, John Griffiths, Amanda Easson, Kyle Mathewson, Jadin Tredup, Erik Bjäreholt.

+

Thanks also to Andrey Parfenov for the excellent brainflow library, which has allowed us to dramatically expand the range of supported devices; as well as the developers of PsychoPy and MNE, which together make up the central scaffolding of eeg-expy.

+
+
+

Contribute

+

This project welcomes and encourages contributions from the community!

+

If you have an idea of something to add to eeg-expy, please start by opening an +issue.

+
+
+

Contact

+

The best place for general discussion on eeg-expy functionality is the issues page. For more general questions and discussions, you can e-mail john.griffiths@utoronto.ca, or ping us on the NeuroTechX Discord or NeuroTechX slack.

+doc/img/eeg-notebooks_democratizing_the_cogneuro_experiment.png + + + +
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/misc/about_the_docs.html b/develop/misc/about_the_docs.html new file mode 100644 index 00000000..b40d6a45 --- /dev/null +++ b/develop/misc/about_the_docs.html @@ -0,0 +1,505 @@ + + + + + + + About the EEG-ExPy Documentation Pages — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

About the EEG-ExPy Documentation Pages

+

A few comments on how these are put together:

+

The documentation pages are written for the following readthedocs sphinx-based setup:

+
    +
  • readthedocs auto-generates the documentation, using various configuration files

  • +
  • In particular, we use Nbsphinx to create html pages directly. +from a combination of jupyter notebooks, .rst files, and .md files

  • +
  • Because the files are not located under the docs folder, we additionally need to make use of nbsphinx-link

  • +
+
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/misc/muse_info.html b/develop/misc/muse_info.html new file mode 100644 index 00000000..66d5a2fc --- /dev/null +++ b/develop/misc/muse_info.html @@ -0,0 +1,525 @@ + + + + + + + Technical Information about the MUSE — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Technical Information about the MUSE

+
+

MUSE setup and usage

+

There is a lot of excellent information on MUSE setup and usage on the website of the Krigolson lab website at the University of Victoria, BC.

+

The following instructional videos are particularly worth checking out:

+
+

Introduction to the MUSE

+

 An Introduction to the MUSE

+
+
+

Headband fit and signal quality tutorial

+

 Headband fit and signal quality tutorial

+
+
+

Adjusting and fitting the MUSE for better signal quality

+

 Adjusting and fitting the MUSE for better signal quality

+
+
+

Using water for better signal quality

+

 Using water for better signal quality

+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/misc/ntcs_phase1_instructions.html b/develop/misc/ntcs_phase1_instructions.html new file mode 100644 index 00000000..19a38d62 --- /dev/null +++ b/develop/misc/ntcs_phase1_instructions.html @@ -0,0 +1,732 @@ + + + + + + + NTCS Phase 1 Instructions — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

NTCS Phase 1 Instructions

+

Welcome to the NeuroTech Challenge Series (NTCS), Phase 1 (P1) instructions page.

+

NTCSP1 is an international citizen science research study, run by researchers at the University of Toronto and the CAMH KCNI, in collaboration with OpenBCI, NeuroTechX, and the eeg-expy core developer team.

+

If this is the first time you are learning about NTCS, check out the NTCS website for additional info about this and other upcoming challenges and opportunities.

+

If you are ready to get going with NTCSP1, have been directed here by other comms from us, or would simply like to browse through to get a better idea of what’s involved, you are in the right place.

+

These instructions, and NTCS in general, also double up as an excellent practical introduction to the eeg-expy software.

+

Have fun and thank you for joining on NTCSP1!

+
+

On this page

+

This page is your reference location for how to participate in NTCSP1.

+

We have tried to keep it fairly succinct, whilst also including all the key info. At some points there is relevant documentation in other locations which does not make sense to duplicate here (e.g. installation of third-party software such as Miniconda or Bluemuse), in which cases we provide links. There are also links to other additional information at various points that is informative but not essential for you to consult.

+

Contents:

+ +
+
+

Instructional Videos

+

We recommend that before you get going on the steps listed below, you watch the instructional videos.

+

Video 1 is a short general overview and big picture. You should watch this fully first before continuing.

+

Videos 2-5 include live demos and discussions showing you the specific steps you need to follow, and what to expect when you run them. For these, it is probably best to go back and forth a bit between the instructions text below and stepping through sections carefully, as needed.

+

Video 1: About NTCS
 About NTCS

+

Video 2: Docs and getting started
 Docs and getting started

+

Video 3: Connecting your EEG device and checking signal quality
 Connecting your EEG device and checking signal quality

+

Video 4: Running the N170 experiment, reviewing and uploading your data.
 Running the N170 experiment, reviewing and uploading your data

+

Additionally, you might be interested to check out JG’s introductory lecture on eeg-notebooks / eeg-expy . This is not mandatory however for NTCSP1.

+

 Intro to EEG Notebooks

+
+
+

Summary of the steps

+

The end-to-end sequence of steps for participating in NTCS are as follows:

+
    +
  • Register for the study by completing the informed consent form.

  • +
  • Check your e-mail. This will give you a personalized link for uploading your recorded data. It will also give you these instructions.

  • +
  • Install miniconda.

  • +
  • Set up your miniconda environment and install eeg-expy.

  • +
  • Setup your eeg-device (See specific instructions for OpenBCI, Muse, Neurosity, Gtec devices).

  • +
  • Run the experiment using the script prompts

  • +
  • Locate the recorded data on your local machine and run the data zipper command

  • +
  • Upload your data to our secure servers via the personalized link from your first e-mail.

  • +
+
+
+

TL;DR instructions version

+
    +
  1. Start anaconda and activate environment

  2. +
+

conda activate ntcs

+
    +
  1. Run signal quality checker

  2. +
+

eegnb checksigqual -ed cyton

+
    +
  1. Run N170 experiment

  2. +
+

eegnb runexp -ip

+

Specify 5 minutes (300 seconds) run duration

+
    +
  1. Repeat step 3 ten times, with (up to) 2 minute breaks in between.

  2. +
  3. Zip data and upload to website via your personalized link.

  4. +
+
+
+

Full step-by-step instructions

+ +
+

2. Check your e-mail.

+

Check your inbox for the email address you provided in the consent form. This email includes instructions to run the experiment along with a unique link for you to upload your data. This email is unique to you. Please do not share it with anyone. You can use this link an infinite number of times to upload your data. Aside from the link, the rest of the instructions are the same for everyone and they are denoted below in this page for your reference as well.

+

When you click on the unique uploader link, you will be directed to a small form where you can upload your .zip file generated by eeg-expy securely to the CAMH servers. The data will be anonymized for further use and access. In this page, you will also answer a few anonymous questions regarding age, sex, and background neurological conditions. If you would like to upload another file, go back to your inbox and click on the link again, and rinse and repeat!

+
+
+

3. Install miniconda

+

Miniconda makes it easy to manage multiple different Python installations on the same machine. While using miniconda environments is not required, we strongly recommend it.

+

Get the latest version of miniconda: https://docs.conda.io/en/latest/miniconda.html

+

Installation instructions: https://conda.io/projects/conda/en/latest/user-guide/install/index.html

+
+
+

4. Set up environment and install eeg-expy

+

Follow the instructions on this page of the eeg-expy docs +https://neurotechx.github.io/eeg-expy/getting_started/installation.html

+
conda create -n "ntcs" python=3.7 git pip wxpython
+
+conda activate ntcs
+
+git clone https://github.com/NeuroTechX/eeg-expy
+
+cd eeg-expy
+
+pip install -e .
+
+
+
+
+

5. Set up your EEG Device

+

First, some general notes:

+ +
+

5.1 Windows+Muse device users

+

EEG-ExPy supports two general options for streaming data from muse devices in windows: Brainflow and Bluemuse.

+

Whilst BlueMuse is an excellent tool, the preferred option is Brainflow, as this is run directly within Python without having to manage an additional streaming tool.

+

However, we have found that Brainflow does not work on all Windows machines, particularly in some Windows 11 computers.

+

So you may need to take a trial-and-error approach here.

+

Ideally the default option, Brainflow (with native bluetooth), will work well for you immediately, and there will be no need to read further on this topic.

+

The eeg-expy codes for the 3 currently available muse models are muse2016, muse2, and museS.

+

For each of these, there is the option to stream on either muselsl (via BlueMuse), Brainflow with native bluetooth, or Brainflow with BLED dongle bluetooth. For a muse 2 device, the codes for these three streaming options would be muse2_bfn, muse2_bfb, muse2.

+

How to check the brainflow is working ok with the muse

+

The easiest way to check if default native-bluetooth Brainflow connection is working on your machine is the following:

+
    +
  • Complete the installation instructions above

  • +
  • Try running the signal quality checker (see below). eegnb checksigqual -ed muse2_bfn

  • +
  • If the flashing lights have gone solid on your device, and the command line signal checker has launched, then you’re in good shape!

  • +
+

BlueMuse

+

Grab the latest BlueMuse version and follow the installation instructions.

+
+
+

5.2 OpenBCI Devices

+

EEG Notebooks supports the OpenBCI Cyton (8 channel) and Ganglion (4 channel) boards. +OpenBCI Getting Started Guides:

+ +

Important - Make sure to check that your FTDI driver is updated and configured properly: +https://docs.openbci.com/Troubleshooting/TroubleshootingLanding/

+

It’s important to set up your electrodes so that they are mapped to the correct 10-20 locations. See table below

+

Electrode Table.

+

OpenBCI Headset Guides:

+ +
+
+
+

6. Run the visual N170 experiment

+

First, activate your conda environment

+

conda activate ntcs

+

Turn on your EEG device, and put it on your head.

+
+

6.1 Check your signal quality

+

Before starting data collection, you need to first verify that your readings have acceptable signal quality.

+

eeg-expy has a simple command-line program for this purpose. Launch it with

+

eegnb checksigqual -ed mydevice

+

(ed here stands for ‘eeg device’)

+

The signal quality checker performs a signal quality check every 5 seconds, and displays results on the command line.

+

It will repeat this 10 times by default. If the signal is evaluated as good for all channels on the device for two runs in row, the program will automatically abort, and you are ready to move on to data collection.

+

Once you launch the signal quality checker, you should do what you can to achieve good signal on all channels. First and foremost this means keeping still, breathing gently, and keeping eye movements and blinks to a minimum.

+

After you have passed the initial signal quality check, you can move on to the experiment.

+

You should repeat the signal quality checker a few times throughout the data collection - especially if you need to adjust the EEG position substantially for some reason. It is not necessary to re-run the signal quality check before all 10 recordings, however.

+
+
+

6.2 Run the experiment

+

Launch the run experiment “interactive prompt” with the command:

+

eegnb runexp -ip

+

and follow the prompts for your specific hardware. When it asks which experiment to run, select “Visual N170”

+
+
6.3.1 Get ready
+

The NTCS Phase 1 experiment lasts approximately 1 hour.

+

When you are ready to begin proper data collection, you should ensure that you have sufficient time to complete (best to budget 1.5-2 hours), that you will not be interrupted during this time, and that you will be able to focus on the visual images presented on the screen with no distractions from your local environment. A small, quiet room such as an office or bedroom is ideal.

+
+
+
Experiment details
+

The experiment consists of 10 short (5 minute) blocks, separated by short (~2 minute) rest periods. Each block consists of several hundred trials, each a few seconds long. On each trial a grayscale visual image of either a face or a house is quickly shown on the screen and then removed. The visual system responds differentially to these types of visual stimuli, in ways that we know are measurable with EEG, and the time course of this differential response (sometimes called a ‘difference wave’) is what we are particularly interested in.

+

After each block, there is a 2 minute rest. Use this time to take a breather and refresh your eyes and get ready for the next block.

+
+
+
What to do
+

You are responsible for the timing of your rest periods and for initiating the next block on time after 2 minutes.

+

To initiate a block, you will use the interactive command line prompt, where you should enter your device and subject information.

+

After the prompt questions are completed, the command line signal quality checker will be automatically launched. Take this opportunity if needed to adjust your device location on the head to maximize signal quality.

+

The signal quality checker utility will cease when there are two successful ‘good signal’ evaluations, at which point you are good to go, and the visual images will begin to appear on screen.

+
+
+
+
+

7. After recording, locate and zip your recorded data

+
+

7.1 Take a look at what’s there

+

When you have completed your recording session (or at any other time), you can check what data files you have recorded with eeg-expy using the following convenient command line utility:

+

eegnb checkdirs

+

This will print out to the command line a list of the folders and files present in the default storage location. This list includes two principal types: the demo data, and data you have recorded (`local’ data). Note that any other data saved at non-default locations that you have specified yourself may not be included in this list.

+

The default location is ~/.eegnb/data, file naming convention of {experiment_name}_{site}-{day_month_year_hour:minute}.csv - such as C:\Users\eeglab\.eegnb\data\visual-N170\local\museS_bfn\subject0001\session001\recording_2021-12-24-05.25.25.csv.

+
+
+

7.2 Compress your data

+

When you are ready to continue, run the file zipper command line utility. This will create a new file on your desktop.

+
+
+
+

8. Upload your zipped data file to your personalized URL

+

Final step - go back to the e-mail you received after completing the consent form. This contains your personalized URL for uploading your data.

+
+
+

9. Make use of your newfangled knowledge!

+

Congratulations~ You have completed Neurotech Challenge Phase 1.

+

As a bonus, you now have a working knowledge of how to run cognitive neuroscience experiments on your personal computer with your mobile EEG system. There are many other interesting experiments in eeg-expy that you might be interested to try out, and they’re extremely easy to customize, modify, and extend. Follow your nose and have fun!

+

Also, drop us a line at team@neurotech-challenge.com, tell us how you found NTCS Phase 1. We’d love to hear from you~

+
+
+
+

FAQs, Troubleshooting, Gotchas

+
+

Where are my files?

+

Check out the info in point 7.

+
+
+

OpenBCI Ports and drivers

+

This is a major gotcha. So much so we’re listing it twice. If you are using OpenBCI on Windows, MAKE SURE you have configured the FTDI driver as described above. Otherwise the sampling rate will be 1/16 the desired level and you won’t get any decent data.

+
+
+

Python is not 3.7

+
    +
  • Python 3.8+ is not currently supported by eeg-expy. If you type Python and the prompt tells you it is not Python 3.7, and this was not something you intended, then something has gone wrong with your installation or environment setup. Go back and repeat the environment setup steps; if the issue persists, repeat the miniconda install steps.

  • +
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/misc/using_an_extra_electrode_muse.html b/develop/misc/using_an_extra_electrode_muse.html new file mode 100644 index 00000000..30ab13ce --- /dev/null +++ b/develop/misc/using_an_extra_electrode_muse.html @@ -0,0 +1,508 @@ + + + + + + + Using an extra electrode with Muse — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Using an extra electrode with Muse

+

Although the Muse is wonderful for ease-of-use and affordability, it suffers from a small number of electrode locations and inflexibility of electrode positioning. Fortunately, in order to partially overcome this limitation, the Muse hardware team has allowed an extra electrode to be added to the Muse 2016.

+
+

The electrode

+

These electrodes are not for sale anywhere; they must be made by hand. Fortunately, their construction appears pretty simple: attach an EEG electrode (any kind) to a male microUSB port with a wire.

+

We’ll update this section with more info as it comes in from the Muse hardware team.

+

fig

+
+
+

Attaching the extra electrode

+

The extra electrode can be applied anywhere on the head (provided the wire is long enough). Just insert the electrode’s microUSB connector into the charging port of the Muse. In order to make sure the electrode stays in place, we recommend using a hat or scarf as pictured.

+

fig

+
+
+

Getting data from the electrode

+

With the extra electrode connected to the Muse, its data is available as the Right AUX channel in the muse-lsl data stream. It will automatically appear in muse-lsl’s viewer. An example of how to access this data and include it in your analysis is shown in the P300 with Extra Electrode notebook

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file diff --git a/develop/objects.inv b/develop/objects.inv new file mode 100644 index 00000000..113e4da8 Binary files /dev/null and b/develop/objects.inv differ diff --git a/develop/search.html b/develop/search.html new file mode 100644 index 00000000..1cc5b169 --- /dev/null +++ b/develop/search.html @@ -0,0 +1,510 @@ + + + + + + Search — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + + + +
+ +
+ +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + + + + + + \ No newline at end of file diff --git a/develop/searchindex.js b/develop/searchindex.js new file mode 100644 index 00000000..3dc3ef25 --- /dev/null +++ b/develop/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["auto_examples/index", "auto_examples/sg_execution_times", "auto_examples/visual_cueing/01r__cueing_singlesub_analysis", "auto_examples/visual_cueing/02r__cueing_group_analysis", "auto_examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019", "auto_examples/visual_cueing/04r__cueing_group_analysis_winter2019", "auto_examples/visual_cueing/index", "auto_examples/visual_cueing/sg_execution_times", "auto_examples/visual_gonogo/index", "auto_examples/visual_gonogo/sg_execution_times", "auto_examples/visual_n170/00x__n170_run_experiment", "auto_examples/visual_n170/01r__n170_viz", "auto_examples/visual_n170/02r__n170_decoding", "auto_examples/visual_n170/index", "auto_examples/visual_n170/sg_execution_times", "auto_examples/visual_p300/00x__p300_run_experiment", "auto_examples/visual_p300/01r__p300_viz", "auto_examples/visual_p300/02r__p300_decoding", "auto_examples/visual_p300/index", "auto_examples/visual_p300/sg_execution_times", "auto_examples/visual_ssvep/00x__ssvep_run_experiment", "auto_examples/visual_ssvep/01r__ssvep_viz", "auto_examples/visual_ssvep/02r__ssvep_decoding", "auto_examples/visual_ssvep/index", "auto_examples/visual_ssvep/sg_execution_times", "changelog", "docs_notes", "experiments/all_examples", "experiments/cueing", "experiments/gonogo", "experiments/vn170", "experiments/vp300", "experiments/vssvep", "getting_started/analysis", "getting_started/available_experiments", "getting_started/data_zipper", "getting_started/faq", "getting_started/installation", "getting_started/loading_and_saving", "getting_started/running_experiments", "getting_started/streaming", "index", "misc/about_the_docs", "misc/muse_info", "misc/ntcs_phase1_instructions", "misc/using_an_extra_electrode_muse", 
"sg_execution_times"], "filenames": ["auto_examples/index.rst", "auto_examples/sg_execution_times.rst", "auto_examples/visual_cueing/01r__cueing_singlesub_analysis.rst", "auto_examples/visual_cueing/02r__cueing_group_analysis.rst", "auto_examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.rst", "auto_examples/visual_cueing/04r__cueing_group_analysis_winter2019.rst", "auto_examples/visual_cueing/index.rst", "auto_examples/visual_cueing/sg_execution_times.rst", "auto_examples/visual_gonogo/index.rst", "auto_examples/visual_gonogo/sg_execution_times.rst", "auto_examples/visual_n170/00x__n170_run_experiment.rst", "auto_examples/visual_n170/01r__n170_viz.rst", "auto_examples/visual_n170/02r__n170_decoding.rst", "auto_examples/visual_n170/index.rst", "auto_examples/visual_n170/sg_execution_times.rst", "auto_examples/visual_p300/00x__p300_run_experiment.rst", "auto_examples/visual_p300/01r__p300_viz.rst", "auto_examples/visual_p300/02r__p300_decoding.rst", "auto_examples/visual_p300/index.rst", "auto_examples/visual_p300/sg_execution_times.rst", "auto_examples/visual_ssvep/00x__ssvep_run_experiment.rst", "auto_examples/visual_ssvep/01r__ssvep_viz.rst", "auto_examples/visual_ssvep/02r__ssvep_decoding.rst", "auto_examples/visual_ssvep/index.rst", "auto_examples/visual_ssvep/sg_execution_times.rst", "changelog.rst", "docs_notes.md", "experiments/all_examples.rst", "experiments/cueing.rst", "experiments/gonogo.rst", "experiments/vn170.rst", "experiments/vp300.rst", "experiments/vssvep.rst", "getting_started/analysis.md", "getting_started/available_experiments.md", "getting_started/data_zipper.md", "getting_started/faq.md", "getting_started/installation.rst", "getting_started/loading_and_saving.md", "getting_started/running_experiments.md", "getting_started/streaming.md", "index.rst", "misc/about_the_docs.md", "misc/muse_info.md", "misc/ntcs_phase1_instructions.md", "misc/using_an_extra_electrode_muse.md", "sg_execution_times.rst"], "titles": ["All Notebook 
Examples", "Computation times", "Cueing Single Subject Analysis", "Cueing Group Analysis", "Cueing Behavioural Analysis Winter 2019", "Cueing Group Analysis Winter 2019", "<no title>", "Computation times", "<no title>", "Computation times", "N170 run experiment", "N170 Load and Visualize Data", "N170 Decoding", "<no title>", "Computation times", "P300 run experiment", "P300 Load and Visualize Data", "P300 Decoding", "<no title>", "Computation times", "SSVEP run experiment", "SSVEP Visualization", "SSVEP Decoding", "<no title>", "Computation times", "Code Changelog", "EEG-ExPy Documentation Developer Notes", "All Notebook Examples", "Visual Cueing", "Visual Go-No-Go", "Visual N170", "Visual P300", "Visual SSVEP", "Analyzing data", "Available Experiments", "Data Zipping", "Frequently Asked Questions", "Installation", "Loading and Saving Data", "Running Experiments", "Initiating an EEG Stream", "EEG-ExPy", "About the EEG-ExPy Documentation Pages", "Technical Information about the MUSE", "NTCS Phase 1 Instructions", "Using an extra electrode with Muse", "Computation times"], "terms": {"thi": [0, 10, 11, 12, 15, 16, 17, 20, 21, 22, 25, 26, 27, 30, 31, 32, 34, 35, 37, 38, 39, 40, 41, 45], "section": [0, 27, 35, 39, 40, 44, 45], "ha": [0, 16, 26, 27, 34, 39, 40, 41, 44, 45], "differ": [0, 3, 5, 22, 27, 30, 37, 39, 44], "function": [0, 2, 3, 5, 11, 12, 16, 17, 21, 22, 25, 27, 37, 38, 39, 40, 41], "avail": [0, 12, 17, 21, 27, 35, 37, 39, 41, 44, 45], "modul": [0, 27, 34], "ar": [0, 2, 4, 11, 16, 21, 22, 26, 27, 28, 30, 34, 35, 37, 38, 39, 40, 41, 42, 43, 45], "organ": [0, 11, 16, 21, 26, 27, 37], "topic": [0, 27, 44], "can": [0, 2, 21, 22, 26, 27, 28, 29, 32, 34, 35, 36, 37, 38, 40, 41, 44, 45], "explor": [0, 27], "ani": [0, 25, 27, 34, 37, 39, 40, 44, 45], "order": [0, 2, 3, 5, 11, 12, 16, 17, 22, 27, 30, 37, 39, 40, 45], "cue": [0, 6, 7, 25, 27, 35, 39, 41, 46], "singl": [0, 4, 6, 7, 27, 28, 35, 39, 46], "subject": [0, 4, 6, 7, 10, 11, 12, 16, 17, 21, 22, 27, 28, 29, 34, 
37, 38, 41, 43, 44, 46], "analysi": [0, 6, 7, 11, 12, 16, 17, 21, 22, 27, 28, 37, 38, 41, 45, 46], "group": [0, 6, 7, 26, 27, 28, 46], "behaviour": [0, 6, 7, 27, 28, 46], "winter": [0, 6, 7, 27, 28, 46], "2019": [0, 6, 7, 25, 27, 28, 46], "n170": [0, 13, 14, 16, 27, 35, 36, 37, 38, 39, 41, 46], "run": [0, 2, 3, 4, 5, 11, 13, 14, 16, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 30, 31, 32, 34, 35, 37, 38, 40, 41, 46], "experi": [0, 2, 3, 4, 5, 11, 12, 13, 14, 16, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 37, 38, 40, 46], "load": [0, 4, 13, 14, 18, 19, 21, 27, 30, 31, 37, 39, 46], "visual": [0, 3, 4, 5, 12, 13, 14, 17, 18, 19, 22, 23, 24, 25, 27, 35, 37, 38, 39, 41, 46], "data": [0, 13, 14, 18, 19, 21, 27, 30, 31, 37, 39, 40, 41, 46], "decod": [0, 13, 14, 18, 19, 23, 24, 27, 30, 31, 32, 46], "p300": [0, 18, 19, 27, 35, 38, 39, 41, 43, 45, 46], "ssvep": [0, 2, 23, 24, 27, 35, 38, 39, 41, 46], "download": [0, 2, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 27, 28, 29, 30, 31, 32, 37], "python": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 26, 27, 28, 29, 30, 31, 32, 39, 41], "sourc": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 26, 27, 28, 29, 30, 31, 32, 37], "code": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 26, 27, 28, 29, 30, 31, 32, 37, 40, 41, 44], "auto_examples_python": [0, 27], "zip": [0, 2, 6, 8, 11, 13, 16, 18, 21, 23, 27, 28, 29, 30, 31, 32, 41], "jupyt": [0, 2, 3, 4, 5, 6, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 27, 28, 30, 31, 32, 37, 41, 42], "auto_examples_jupyt": [0, 27], "galleri": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 27, 28, 29, 30, 31, 32, 46], "gener": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 27, 28, 29, 30, 31, 32, 37, 38, 39, 41, 42, 44], "sphinx": [0, 2, 3, 4, 5, 6, 8, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 27, 28, 29, 
30, 31, 32, 42], "00": [1, 2, 3, 5, 7, 9, 11, 12, 14, 16, 17, 19, 21, 22, 24, 46], "000": [1, 2, 3, 5, 9, 11, 12, 14, 16, 17, 19, 21, 22, 24, 46], "total": [1, 2, 3, 4, 5, 7, 9, 11, 12, 14, 16, 17, 19, 21, 22, 24, 46], "execut": [1, 7, 9, 14, 19, 24, 26, 29, 34, 46], "0": [1, 2, 3, 4, 5, 7, 9, 10, 11, 12, 14, 15, 16, 17, 19, 20, 21, 22, 24, 35, 39, 41, 46], "file": [1, 2, 3, 4, 5, 7, 9, 11, 12, 14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 35, 37, 38, 39, 41, 42, 46], "from": [1, 2, 3, 4, 5, 7, 9, 10, 11, 12, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 30, 37, 38, 39, 40, 41, 42, 43, 44, 46], "auto_exampl": [1, 7, 9, 14, 19, 24], "exampl": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 35, 39, 41, 45, 46], "mem": [1, 7, 9, 14, 19, 24, 46], "mb": [1, 7, 9, 14, 19, 24, 46], "n": [1, 9, 37, 44], "A": [1, 9, 22, 28, 34, 37, 38, 39, 41, 42, 44], "go": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 37, 39, 40, 41, 44], "end": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 34, 38, 44], "full": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 38, 41], "some": [2, 4, 10, 11, 12, 15, 16, 17, 20, 21, 22, 28, 37, 39, 44], "standard": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 37, 39, 41], "import": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 37, 38, 39, 40, 44], "o": [2, 3, 4, 5, 11, 12, 15, 16, 17, 20, 21, 22, 37, 40], "numpi": [2, 3, 4, 5, 12, 17, 21, 22], "np": [2, 3, 4, 5, 12, 17, 21, 22], "sy": [2, 3, 4, 5], "glob": [2, 3, 4, 5], "panda": [2, 3, 4, 5, 12, 17, 21, 22], "pd": [2, 3, 4, 5, 12, 17, 21, 22], "collect": [2, 3, 5, 11, 12, 16, 17, 21, 22, 37, 39, 41, 44], "ordereddict": [2, 3, 5, 11, 12, 16, 17, 21, 22], "warn": [2, 3, 5, 11, 12, 16, 17, 21, 22], "filterwarn": [2, 3, 5, 11, 12, 16, 17, 21, 22], "ignor": [2, 3, 5, 11, 12, 16, 17, 21, 22], "matplotlib": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "pyplot": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "plt": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "patch": [2, 3, 
5], "mne": [2, 11, 12, 16, 17, 21, 22, 26, 41], "find_ev": [2, 3, 5, 11, 12, 16, 17, 21, 22], "concatenate_raw": [2, 3, 5], "time_frequ": [2, 3, 5, 21], "tfr_morlet": [2, 3, 5, 21], "eeg": [2, 3, 4, 5, 10, 11, 12, 16, 17, 21, 22, 25, 32, 34, 37, 38, 39, 43, 45], "notebook": [2, 3, 4, 5, 6, 10, 11, 12, 13, 15, 16, 17, 18, 20, 21, 22, 23, 25, 26, 28, 30, 31, 32, 37, 38, 40, 41, 42, 44, 45], "eegnb": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 35, 36, 37, 38, 39, 40, 44], "analysis_util": [2, 3, 5, 11, 12, 16, 17, 21, 22], "load_data": [2, 3, 5, 11, 12, 16, 17, 21, 22, 37, 38], "plot_condit": [2, 11, 16, 21], "dataset": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 25, 38, 44], "fetch_dataset": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "us": [2, 3, 11, 12, 16, 17, 21, 22, 26, 30, 31, 32, 34, 37, 41, 42], "expi": [2, 10, 11, 15, 16, 20, 21, 39], "eegnb_data_path": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "path": [2, 3, 4, 5, 10, 11, 12, 16, 17, 21, 22, 38], "join": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 44], "expandus": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "cueing_data_path": [2, 3, 4, 5], "kylemathlab_dev": [2, 3, 4, 5], "If": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 35, 37, 38, 39, 40, 41, 44], "hasn": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "t": [2, 4, 11, 12, 16, 17, 21, 22, 34, 37, 44], "been": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 35, 39, 40, 44], "yet": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 35, 39], "isdir": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "data_dir": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 38], "site": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 25, 35, 38, 41, 44], "sub": [2, 3, 4, 5, 26], "302": [2, 3, 4, 5], "sess": 2, "1": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 30, 35, 37, 38, 39, 41], "raw": [2, 3, 5, 11, 12, 16, 17, 21, 22, 37, 38, 39, 40], "session": [2, 3, 4, 5, 10, 11, 12, 16, 17, 21, 22, 35, 37, 38, 44], "device_nam": [2, 3, 5, 11, 12, 16, 17, 21, 22, 38], "muse2016": [2, 3, 4, 5, 12, 16, 17, 21, 22, 39, 40, 44], "append": [2, 3, 5], "2": [2, 3, 4, 5, 11, 12, 16, 17, 
21, 22, 26, 30, 34, 35, 37, 39, 41], "origin": [2, 3, 5, 11, 22, 26], "http": [2, 11, 16, 21, 26, 37, 44], "drive": [2, 11, 16, 21], "googl": [2, 11, 16, 21, 22], "com": [2, 11, 16, 21, 37, 40, 44], "uc": [2, 11, 16, 21], "id": [2, 3, 5, 10, 11, 12, 16, 17, 21, 22, 38], "1abovj9s0bejosqdgfnexatfz": 2, "zcsixfq": 2, "redirect": [2, 11], "confirm": [2, 11], "uuid": [2, 11], "015d1c9b": 2, "6ead": 2, "4318": 2, "b5d9": 2, "333a0b7d7b30": 2, "To": [2, 11, 16, 21, 33, 35, 37, 39, 40, 44], "home": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "runner": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "downloaded_data": [2, 11, 16, 21], "102m": 2, "b": [2, 11, 16, 21, 41], "": [2, 3, 5, 11, 12, 16, 17, 21, 22, 25, 30, 32, 34, 35, 37, 39, 40, 41, 43, 45], "6": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 32, 34, 35, 39, 41], "29m": [2, 16], "01": [2, 3, 5, 7, 11, 12, 17, 21, 24, 46], "62": 2, "1mb": [2, 16, 21], "12": [2, 3, 4, 5, 12, 17, 19, 21, 24, 34, 44, 46], "6m": 2, "03": [2, 3, 4, 5, 11, 36], "27": [2, 3, 4, 5, 11, 21, 22], "5mb": 2, "17": [2, 3, 4, 5, 11, 12, 21, 22], "3m": [2, 11], "26": [2, 3, 4, 5, 11, 12, 46], "25": [2, 3, 4, 5, 11, 12, 17, 21, 22, 44], "7m": [2, 11], "02": [2, 3, 5, 11, 12, 16, 17, 22, 46], "31": [2, 3, 5], "2mb": [2, 11], "34": [2, 3, 5, 11, 14, 16], "1m": [2, 11], "36": [2, 3, 5, 21, 22], "8mb": 2, "42": [2, 3, 11, 12, 17, 22, 36], "5m": 2, "39": [2, 3, 5], "50": [2, 3, 4, 5, 12, 21], "9m": 2, "0mb": [2, 11, 21], "58": [2, 3, 4, 5], "59": [2, 3, 11], "2m": 2, "40": [2, 3, 5, 11, 12], "7mb": 2, "67": [2, 3, 5], "43": [2, 3, 5], "75": [2, 21], "76": 2, "0m": 2, "46": [2, 3, 5], "88": [2, 5], "89": 2, "96": [2, 3, 4], "97": [2, 11, 16], "53": [2, 3, 5, 11], "100": [2, 3, 5, 11, 12, 16, 17, 21, 22], "subject0302": [2, 3], "session001": [2, 3, 5, 11, 12, 16, 17, 21, 22, 44], "subject302_session1_recording_2018": [2, 3], "11": [2, 3, 4, 5, 11, 16, 19, 44, 46], "20": [2, 3, 4, 5, 11, 12, 21, 22, 44], "10": [2, 3, 4, 5, 7, 11, 17, 21, 26, 34, 37, 39, 40, 43, 44, 46], "csv": 
[2, 3, 4, 5, 10, 11, 12, 16, 17, 21, 22, 36, 38, 44], "tp9": [2, 3, 5, 11, 12, 16, 17, 21, 22, 30], "af7": [2, 3, 5, 12, 16, 17, 21, 22], "af8": [2, 3, 5, 12, 16, 17, 21, 22], "tp10": [2, 3, 5, 11, 12, 16, 17, 21, 22, 30], "right": [2, 3, 5, 12, 16, 17, 21, 22, 28, 32, 34, 39, 44, 45], "aux": [2, 3, 5, 12, 16, 17, 21, 22, 45], "stim": [2, 3, 5, 11, 12, 16, 17, 21, 22], "creat": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 26, 37, 39, 41, 42, 44], "rawarrai": [2, 3, 5, 11, 12, 16, 17, 21, 22], "float64": [2, 3, 5, 11, 12, 16, 17, 21, 22], "n_channel": [2, 3, 5, 11, 12, 16, 17, 21, 22], "n_time": [2, 3, 5, 11, 12, 16, 17, 21, 22], "61296": [2, 3, 5], "rang": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 32, 34, 41], "61295": [2, 3, 5], "239": [2, 3, 5], "434": [2, 3, 4, 5, 11], "sec": [2, 3, 5, 11, 12, 16, 17, 21, 22], "readi": [2, 3, 5, 11, 12, 16, 17, 21, 22, 41], "session002": [2, 3, 4, 5], "subject302_session2_recording_2018": [2, 3], "18": [2, 3, 4, 5, 16, 46], "04": [2, 3, 4, 5, 7, 14, 16, 17, 21, 22, 46], "2d": 2, "backend": [2, 25, 37, 39, 40], "mnebrowsefigur": 2, "size": [2, 11, 16, 21, 43], "800x800": 2, "4": [2, 3, 4, 5, 7, 11, 12, 17, 21, 22, 35, 39, 41], "ax": [2, 3, 4, 5, 11, 16, 21], "One": [2, 22], "wai": [2, 30, 39, 44], "analyz": [2, 3, 37, 38, 41], "i": [2, 3, 11, 12, 16, 17, 21, 22, 26, 28, 29, 30, 31, 32, 34, 35, 37, 38, 39, 40, 41, 43, 45], "psd": [2, 34], "should": [2, 11, 12, 16, 17, 21, 22, 32, 39, 40, 44], "appear": [2, 21, 34, 37, 38, 44, 45], "peak": [2, 21, 34], "certain": [2, 31, 34], "frequenc": [2, 3, 5, 12, 17, 21, 22, 32, 34], "expect": [2, 30, 37, 44], "clear": [2, 21, 34], "domain": [2, 37], "stimul": [2, 21, 22, 31, 32, 34], "30": [2, 3, 5, 11, 12, 14, 16, 17, 21, 22, 46], "hz": [2, 3, 5, 11, 12, 16, 17, 21, 22], "compute_psd": [2, 11, 16, 21], "see": [2, 12, 17, 21, 22, 26, 30, 37, 39, 40, 41, 44], "electr": [2, 32], "nois": [2, 39], "60": [2, 21], "mayb": 2, "red": 2, "blue": [2, 40], "channel": [2, 3, 5, 11, 12, 16, 17, 21, 22, 44, 
45], "between": [2, 3, 25, 32, 37, 40, 44], "7": [2, 3, 4, 5, 12, 17, 21, 35, 37, 39, 41], "14": [2, 3, 4, 5, 11, 21, 22, 24], "alpha": [2, 28, 41], "effect": [2, 3, 5, 11, 12, 16, 17, 21, 22], "window": [2, 3, 5, 11, 16, 21, 39, 41], "8": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 35, 39, 41], "mnelinefigur": [2, 11, 16, 21], "1000x350": [2, 11, 16, 21], "most": [2, 22, 26, 30, 31, 34], "erp": [2, 11, 16, 22, 28, 30, 31, 34, 41], "compon": [2, 30, 34], "compos": 2, "lower": [2, 3, 5], "fluctuat": 2, "signal": [2, 21, 34, 39, 41], "thu": [2, 30, 31], "out": [2, 4, 41, 43, 44], "all": [2, 6, 8, 11, 12, 13, 18, 21, 22, 23, 28, 29, 30, 31, 32, 34, 35, 36, 37, 38, 39, 41, 44, 46], "increas": [2, 34], "our": [2, 30, 39, 44], "abil": 2, "detect": [2, 29, 30, 32, 34], "them": [2, 12, 17, 26, 39, 44], "method": [2, 3, 5, 11, 12, 16, 17, 21, 22, 35, 39], "iir": [2, 3, 5, 11, 12, 16, 17, 22], "fmin": [2, 11, 16], "fmax": [2, 11, 16], "3": [2, 3, 4, 5, 11, 12, 14, 16, 17, 19, 21, 22, 24, 26, 35, 37, 39, 41], "contigu": [2, 3, 5, 11, 12, 16, 17, 22], "segment": [2, 3, 5, 11, 12, 16, 17, 21, 22], "set": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 38, 39, 41], "up": [2, 3, 5, 11, 12, 16, 17, 22, 28, 38, 40, 41], "band": [2, 3, 5, 11, 12, 16, 17, 22], "pass": [2, 3, 5, 11, 12, 16, 17, 22, 38, 39, 40, 44], "paramet": [2, 3, 5, 11, 12, 16, 17, 22, 35, 38, 39, 40], "butterworth": [2, 3, 5, 11, 12, 16, 17, 22], "bandpass": [2, 3, 5, 11, 12, 16, 17, 22], "zero": [2, 3, 4, 5, 11, 12, 16, 17, 22], "phase": [2, 3, 5, 11, 12, 16, 17, 22, 41], "two": [2, 3, 5, 11, 12, 16, 17, 21, 22, 26, 34, 37, 39, 40, 44], "forward": [2, 3, 5, 11, 12, 16, 17, 22, 44], "revers": [2, 3, 5, 11, 12, 16, 17, 22], "non": [2, 3, 5, 11, 12, 16, 17, 22, 34, 44], "causal": [2, 3, 5, 11, 12, 16, 17, 22], "16": [2, 3, 4, 5, 11, 12, 16, 17, 22, 44], "after": [2, 3, 5, 11, 12, 16, 17, 21, 22, 28, 30, 31, 34, 35, 39, 41], "backward": [2, 3, 5, 11, 12, 16, 17, 22], "cutoff": [2, 3, 4, 5, 11, 12, 16, 17, 22], "db": [2, 3, 5, 11, 
12, 16, 17, 21, 22], "next": [2, 21, 22, 39, 44], "chunk": [2, 3, 21, 22], "repres": [2, 11, 12, 21, 22], "1000m": 2, "befor": [2, 21, 22, 39, 40, 41, 44], "2000m": 2, "each": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 38, 39, 40, 44], "reject": [2, 3, 5, 11, 12, 16, 17, 21, 22], "everi": [2, 39, 44], "where": [2, 31, 37, 38, 41], "amplitud": [2, 21, 22, 34], "exceed": 2, "uv": [2, 3, 5], "which": [2, 16, 21, 26, 29, 32, 34, 35, 37, 38, 39, 40, 41, 43, 44], "ey": [2, 21, 22, 30, 41, 44], "blink": [2, 21, 22, 39, 44], "event": [2, 3, 5, 11, 12, 16, 17, 21, 22, 29, 30, 31, 34], "event_id": [2, 3, 5, 11, 12, 16, 17, 21, 22], "leftcu": [2, 3, 5], "rightcu": [2, 3, 5], "rej_thresh_uv": [2, 3, 5], "150": 2, "rej_thresh": [2, 3, 5], "1e": [2, 3, 5, 44], "tmin": [2, 3, 5, 11, 12, 16, 17, 21, 22], "tmax": [2, 3, 5, 11, 12, 16, 17, 21, 22], "baselin": [2, 3, 5, 11, 12, 16, 17, 21, 22], "preload": [2, 3, 5, 11, 12, 16, 17, 21, 22], "true": [2, 3, 5, 11, 12, 16, 17, 21, 22, 39], "verbos": [2, 3, 5, 11, 12, 16, 17, 21, 22], "fals": [2, 3, 4, 5, 10, 11, 12, 16, 17, 21, 22, 39], "pick": [2, 3, 5, 11, 12, 16, 17, 21, 22], "print": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 44], "sampl": [2, 11, 12, 16, 17, 21, 22, 35, 43, 44], "drop": [2, 11, 12, 16, 17, 21, 22, 44], "len": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22], "condit": [2, 3, 4, 5, 11, 16, 44], "diffwav": [2, 11, 16], "fig": [2, 3, 5, 11, 12, 16, 22], "ci": [2, 11, 16], "5": [2, 3, 4, 5, 11, 16, 21, 22, 34, 35, 39, 41], "n_boot": [2, 11, 16], "1000": [2, 11, 16, 41], "titl": [2, 3, 4, 5, 11, 16, 21, 26, 39], "diff_waveform": [2, 11, 16], "ylim": [2, 4, 11, 16], "213": [2, 3], "found": [2, 3, 5, 11, 12, 16, 17, 21, 22, 26, 34, 38, 41, 43, 44], "21": [2, 3, 4, 5, 21, 22], "22": [2, 3, 4, 5, 11, 21, 22], "95": 2, "77464788732395": 2, "also": [2, 21, 26, 34, 39, 40, 41, 44], "look": [2, 21, 22, 35, 39, 40, 41], "color": [2, 3, 21], "over": [2, 4, 5, 21, 26, 32, 37, 39], "time": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 34, 38, 39, 
41, 44], "linspac": [2, 3, 5], "endpoint": [2, 3, 5], "wave_cycl": [2, 3, 5], "morlet": [2, 3, 5], "wavelet": [2, 3, 5], "left": [2, 3, 5, 34, 39], "tfr": [2, 3, 5, 21], "itc": [2, 3, 5, 21], "freq": [2, 3, 5, 21, 35, 39], "n_cycl": [2, 3, 5, 21], "return_itc": [2, 3, 5, 21], "apply_baselin": [2, 3, 5], "mode": [2, 3, 5, 21, 37], "mean": [2, 3, 5, 21, 44], "logratio": [2, 3, 21], "ipsi": 2, "contra": 2, "power_ipsi_tp9": [2, 3, 5], "power_contra_tp10": [2, 3, 5], "power_contra_tp9": [2, 3, 5], "power_ipsi_tp10": [2, 3, 5], "appli": [2, 3, 5, 21, 22, 34, 45], "correct": [2, 3, 4, 5, 21, 44], "No": [2, 40, 41], "f_low": [2, 3, 5], "f_high": [2, 3, 5], "f_diff": [2, 3, 5], "t_low": [2, 3, 5], "t_high": [2, 3, 5], "t_diff": [2, 3, 5], "power_avg_ipsi": [2, 3, 5], "power_avg_contra": [2, 3, 5], "power_avg_diff": [2, 3, 5], "find": [2, 3, 39, 41], "max": [2, 3, 5], "make": [2, 3, 26, 34, 37, 39, 40, 41, 42, 45], "plot_max": [2, 3, 5], "ab": [2, 3, 5, 34], "plot_diff_max": [2, 3], "subplot": [2, 3, 4, 5, 11, 21], "im": [2, 3, 5], "imshow": [2, 3, 5], "extent": [2, 3, 5], "aspect": [2, 3, 5], "auto": [2, 3, 5, 12, 17, 22, 42], "cmap": [2, 3, 5], "coolwarm": [2, 3, 5], "vmin": [2, 3, 5], "vmax": [2, 3, 5], "xlabel": [2, 3, 4, 5], "ylabel": [2, 3, 4, 5], "averag": [2, 4, 21], "ipsilater": [2, 3, 28], "cb": [2, 3, 5], "colorbar": [2, 3, 5, 21], "set_label": [2, 3, 5], "rectangl": [2, 3, 5], "rect": [2, 3, 5], "linewidth": [2, 3, 5], "edgecolor": [2, 3, 5], "k": [2, 3, 5], "facecolor": [2, 3, 5], "none": [2, 3, 5, 11, 12, 16, 17, 21, 22, 39], "add": [2, 3, 5, 26, 33, 37, 41], "add_patch": [2, 3, 5], "str": [2, 3, 4, 5], "greater": [2, 43], "direct": [2, 44], "posit": [2, 30, 31, 34, 43, 44, 45], "valu": [2, 38, 39], "second": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 39, 44], "object": [2, 5, 10, 11, 12, 15, 16, 21, 30, 40], "0x7fddc5f5dbe0": 2, "200m": [2, 34], "invalidtarget_left": 2, "invalidtarget_right": 2, "validtarget_left": 2, "validtarget_right": 2, "0001": 2, 
"validtarget": 2, "invalidtarget": 2, "67136150234741": 2, "script": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 37, 41, 44], "minut": [2, 3, 4, 5, 11, 12, 16, 17, 21, 22, 26, 34, 35, 39, 44], "934": [2, 7, 46], "01r__cueing_singlesub_analysi": [2, 7, 46], "ipynb": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 21, 22, 26], "py": [2, 3, 4, 5, 7, 10, 11, 12, 14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 39, 46], "importa": 3, "epoch": 3, "veri": [3, 22, 26, 37], "librari": [3, 26, 38, 39, 40, 41], "It": [3, 25, 26, 31, 32, 34, 37, 39, 40, 44, 45], "provid": [3, 35, 37, 40, 41, 45], "help": [3, 37, 39], "perform": [3, 12, 17, 30, 44], "kei": [3, 39, 41, 44], "task": [3, 28, 29, 41], "filter": [3, 5, 12, 16, 17, 22, 34], "artifact": 3, "The": [3, 11, 12, 16, 17, 21, 22, 25, 26, 28, 29, 30, 31, 32, 34, 35, 37, 38, 39, 40, 41, 42, 43, 44], "first": [3, 11, 16, 21, 26, 34, 39, 40, 44], "step": [3, 37, 40, 41], "depend": [3, 26, 34, 39, 41], "read": [3, 38, 44], "we": [3, 11, 16, 21, 22, 26, 30, 31, 32, 37, 39, 40, 41, 42, 44, 45], "ve": 3, "an": [3, 10, 11, 12, 15, 16, 17, 20, 21, 28, 29, 30, 32, 34, 35, 38, 39, 41, 44], "101": [3, 4, 5], "102": [3, 4, 5, 11, 12, 17], "103": [3, 4, 5], "104": [3, 4, 5], "105": [3, 4, 5], "106": [3, 4, 5], "108": [3, 4, 5], "109": [3, 4, 5], "110": [3, 4, 5], "111": [3, 4, 5], "112": [3, 4, 5], "202": [3, 4, 5], "203": [3, 4, 5], "204": [3, 4, 5], "205": [3, 4, 5], "207": [3, 4, 5], "208": [3, 4, 5], "209": [3, 4, 5], "210": [3, 4, 5], "211": [3, 4, 5], "301": [3, 4, 5], "303": [3, 4, 5], "304": [3, 4, 5, 19], "305": [3, 4, 5], "306": [3, 4, 5], "307": [3, 4, 5], "308": [3, 4, 5], "309": [3, 4, 5], "diff_out": [3, 5], "ipsi_out": [3, 5], "contra_out": [3, 5], "ipsi_spectra_out": [3, 5], "contra_spectra_out": [3, 5], "diff_spectra_out": [3, 5], "ersp_diff_out": [3, 5], "ersp_ipsi_out": [3, 5], "ersp_contra_out": [3, 5], "bad_sub": [3, 5], "13": [3, 4, 5, 12], "really_bad_sub": [3, 5], "19": [3, 4, 5, 11, 36], "sub_count": [3, 5], "90": [3, 5], "elif": 
[3, 5], "els": [3, 5], "both": [3, 4, 5, 22, 25, 34, 38, 39, 41], "select": [3, 4, 5, 35, 44], "trial": [3, 4, 5, 11, 16, 21, 30, 34, 39, 44], "remain": [3, 5], "plot": [3, 4, 5, 11, 16, 21], "inlin": 3, "output": [3, 4, 5, 35, 37, 39], "arrai": [3, 4, 5, 11, 12, 16, 17], "argmax": [3, 5], "shape": [3, 29, 34, 44], "subject0101": [3, 4], "subject101_session1_recording_2018": 3, "61284": [3, 5], "61283": [3, 5], "387": [3, 5], "subject101_session2_recording_2018": 3, "23": [3, 4, 5, 11, 21], "122": 3, "subject0102": 3, "subject102_session1_recording_2018": 3, "05": [3, 44], "subject102_session2_recording_2018": 3, "92": [3, 21], "subject0103": 3, "subject103_session1_recording_2018": 3, "subject103_session2_recording_2018": 3, "33": [3, 5, 11, 12], "160": [3, 5], "subject0104": 3, "subject104_session1_recording_2018": 3, "35": [3, 5, 12, 22], "subject104_session2_recording_2018": 3, "49": [3, 5], "47": [3, 5], "272": 3, "subject0105": 3, "subject105_session1_recording_2018": 3, "07": [3, 5], "subject105_session2_recording_2018": 3, "37": [3, 5, 36], "subject0106": 3, "subject106_session1_recording_2018": 3, "subject106_session2_recording_2018": 3, "201": 3, "subject0108": 3, "subject108_session1_recording_2018": 3, "subject108_session2_recording_2018": 3, "15": [3, 4, 5, 12, 21, 22], "130": 3, "subject0109": 3, "subject109_session1_recording_2018": 3, "44": [3, 5], "subject109_session2_recording_2018": 3, "subject0110": 3, "subject110_session1_recording_2018": 3, "55": [3, 5], "subject110_session2_recording_2018": 3, "56": [3, 5], "107": 3, "28": [3, 5], "subject0111": 3, "subject111_session1_recording_2018": 3, "subject111_session2_recording_2018": 3, "223": 3, "32": [3, 5, 11, 12, 21, 22], "subject0112": 3, "subject112_session1_recording_2018": 3, "subject112_session2_recording_2018": 3, "86": 3, "subject0202": 3, "subject202_session1_recording_2018": 3, "subject202_session2_recording_2018": 3, "156": 3, "61": [3, 16], "subject0203": 3, 
"subject203_session1_recording_2018": 3, "subject203_session2_recording_2018": 3, "08": [3, 5, 12], "120": [3, 10, 12, 15, 16, 17, 20, 21, 22, 37, 39], "subject0204": 3, "subject204_session1_recording_2018": 3, "61308": [3, 5], "61307": [3, 5], "480": [3, 5], "subject204_session2_recording_2018": 3, "29": [3, 5, 7, 11, 21, 22, 46], "51": [3, 5, 11, 21, 22], "183": 3, "subject0205": 3, "subject205_session1_recording_2018": 3, "41": [3, 5, 11], "48": [3, 5], "subject205_session2_recording_2018": 3, "194": [3, 5], "87": 3, "subject0207": 3, "subject207_session1_recording_2018": 3, "38": [3, 5], "subject207_session2_recording_2018": 3, "181": 3, "70": 3, "subject0208": 3, "subject208_session1_recording_2018": 3, "54": [3, 5, 11], "subject208_session2_recording_2018": 3, "subject0209": 3, "subject209_session1_recording_2018": 3, "subject209_session2_recording_2018": 3, "118": 3, "subject0210": 3, "subject210_session1_recording_2018": 3, "subject210_session2_recording_2018": 3, "170": 3, "subject0211": 3, "subject211_session1_recording_2018": 3, "subject211_session2_recording_2018": 3, "9": [3, 4, 5, 11, 12, 39, 41], "subject0301": 3, "subject301_session1_recording_2018": 3, "52": [3, 5, 11], "30564": 3, "30563": 3, "119": [3, 12, 21, 22], "subject301_session2_recording_2018": 3, "57": [3, 5, 11, 21, 22], "subject0303": 3, "subject303_session1_recording_2018": 3, "subject303_session2_recording_2018": 3, "198": [3, 5], "subject0304": 3, "subject304_session1_recording_2018": 3, "subject304_session2_recording_2018": 3, "06": [3, 5], "195": 3, "24": [3, 4, 5, 7, 19, 44, 46], "subject0305": 3, "subject305_session1_recording_2018": 3, "subject305_session2_recording_2018": 3, "166": 3, "subject0306": 3, "subject306_session1_recording_2018": 3, "subject306_session2_recording_2018": 3, "147": 3, "subject0307": 3, "subject307_session1_recording_2018": 3, "subject307_session2_recording_2018": 3, "09": [3, 5, 11, 12, 21, 22, 36], "171": [3, 5], "subject0308": 3, 
"subject308_session1_recording_2018": 3, "subject308_session2_recording_2018": 3, "180": 3, "71": 3, "subject0309": 3, "subject309_session1_recording_2018": 3, "subject309_session2_recording_2018": 3, "196": 3, "74": 3, "769": 3, "8633016529356232e": 3, "194492698041424e": 3, "965717366845577e": 3, "439937924298538e": 3, "5395426956926177e": 3, "nan": [3, 5], "437520530470476e": 3, "156413801290107e": 3, "4542289627374594e": 3, "404002269024156e": 3, "5250575579728836e": 3, "13341442171729e": 3, "7562617842729167e": 3, "498985471214225e": 3, "358448888412755e": 3, "066739867471836e": 3, "833832231476192e": 3, "1353507280272121e": 3, "0578545669354385e": 3, "13612907825663e": 3, "231124565129559e": 3, "417204016507064e": 3, "5824220701839133e": 3, "972694444208325e": 3, "7053453190038292e": 3, "0032262566342049e": 3, "grandavg_diff": [3, 5], "nanmean": [3, 4, 5], "grandavg_ipsi": [3, 5], "grandavg_contra": [3, 5], "grandavg_spec_ipsi": [3, 5], "grandavg_spec_contra": [3, 5], "grandavg_spec_diff": [3, 5], "num_good": [3, 5], "sum": [3, 4, 5], "isnan": [3, 5], "grandavg_spec_ipsi_st": [3, 5], "nanstd": [3, 4, 5], "sqrt": [3, 4, 5], "grandavg_spec_contra_st": [3, 5], "grandavg_spec_diff_st": [3, 5], "error": [3, 5, 21, 26, 37, 40, 44], "bar": [3, 4, 5], "errorbar": [3, 5], "yerr": [3, 4, 5], "legend": [3, 5, 21], "hline": [3, 5], "diff": [3, 5], "grand": [3, 5], "plot_max_diff": [3, 5], "0x7fddc2640d30": 3, "scipi": [3, 4, 5], "tstat": [3, 5], "pval": [3, 5], "stat": [3, 4, 5], "ttest_ind": [3, 5], "nan_polici": [3, 5], "omit": [3, 5], "round": [3, 5], "p": [3, 5], "02456136295644e": 3, "617586361170848e": 3, "164214772412729e": 3, "394": 3, "169": 3, "raw_data": [3, 5], "df": [3, 4, 5], "datafram": [3, 4, 5, 12, 17, 22], "column": [3, 5, 12, 17, 22], "to_csv": [3, 4, 5], "375cueingeeg": [3, 5], "present": [3, 5, 11, 12, 16, 21, 29, 31, 34, 37, 39, 41, 44], "directori": [3, 5, 11, 16, 21], "375cueingipsispec": [3, 5], "375cueingcontraspec": [3, 5], "375cue": 3, "spec": 
3, "319": [3, 4, 7, 46], "02r__cueing_group_analysi": [3, 7, 46], "io": [4, 26, 37, 44], "sio": 4, "fall": [4, 5], "2018": [4, 5, 25], "one": [4, 11, 16, 21, 35, 37, 39, 40, 41], "1101": [4, 5], "1102": [4, 5], "1103": [4, 5], "1104": [4, 5], "1105": [4, 5], "1106": [4, 5], "1108": [4, 5], "1109": [4, 5], "1110": [4, 5], "1202": [4, 5], "1203": [4, 5], "1205": [4, 5], "1206": [4, 5], "1209": [4, 5], "1210": [4, 5], "1211": [4, 5], "1215": [4, 5], "1301": [4, 5], "1302": [4, 5], "1313": [4, 5], "1401": [4, 5], "1402": [4, 5], "1403": [4, 5], "1404": [4, 5], "1405": [4, 5], "1408": [4, 5], "1410": [4, 5], "1411": [4, 5], "1412": [4, 5], "1413": [4, 5], "1414": [4, 5], "1415": [4, 5], "1416": [4, 5], "1107": 4, "1201": 4, "1304": 4, "muse": [4, 11, 16, 21, 22, 25, 30, 31, 32, 34, 36, 39, 41], "1308": 4, "1311": 4, "1314": 4, "1407": 4, "onli": [4, 22, 26, 38], "session1": 4, "basic": 4, "number": [4, 10, 11, 12, 17, 28, 37, 38, 41, 43, 44, 45], "n_sub": 4, "n_sesh": 4, "valid": [4, 12, 17, 22, 28, 34, 39], "invalid": 4, "n_cond": 4, "too": [4, 41], "slow": 4, "fast": 4, "rt_toofast": 4, "250": 4, "rt_tooslow": 4, "1500": 4, "save": [4, 10, 15, 20, 36, 39, 44], "count_rt": 4, "median_rt": 4, "prop_accu": 4, "just": [4, 30, 38, 44, 45], "sesh": 4, "f": [4, 21], "x": [4, 5, 12, 17, 22, 34, 41], "listdir": 4, "endswith": 4, "output_dict": 4, "loadmat": 4, "pull": [4, 41], "info": [4, 41, 44, 45], "median": 4, "validrt": 4, "nanmedian": 4, "m": [4, 12, 17, 22, 26, 37], "invalidrt": 4, "subject101_session2_behoutput_2018": 4, "435": 4, "031485": 4, "423": 4, "867152": 4, "335": 4, "010084": 4, "945121": 4, "297": 4, "102568": 4, "402": 4, "462939": 4, "984897": 4, "352": 4, "377032": 4, "315": 4, "902263": 4, "346": 4, "695745": 4, "409": 4, "051914": 4, "337": 4, "994502": 4, "412": 4, "490445": 4, "253": 4, "304399": 4, "325": 4, "551731": 4, "451": 4, "072688": 4, "362": 4, "531119": 4, "449": 4, "032591": 4, "312": 4, "999804": 4, "293": 4, "670213": 4, "327": 4, 
"738388": 4, "526": 4, "629063": 4, "459": 4, "253055": 4, "355": 4, "719449": 4, "932556": 4, "345": 4, "884407": 4, "454": 4, "775045": 4, "350": 4, "070761": 4, "313": 4, "989374": 4, "374": 4, "227764": 4, "369": [4, 11, 14, 46], "010847": 4, "467709": 4, "334": 4, "978303": 4, "730519": 4, "363": 4, "791318": 4, "067033": 4, "155788": 4, "419": 4, "084514": 4, "271": 4, "027066": 4, "420538": 4, "342": 4, "584127": 4, "859108": 4, "314": 4, "203547": 4, "339": 4, "341316": 4, "467": 4, "860489": 4, "351041": 4, "173015": 4, "340": 4, "925891": 4, "318": 4, "901094": 4, "351": 4, "947652": 4, "437": 4, "583955": 4, "290": 4, "667171": 4, "596349": 4, "331": 4, "919014": 4, "390": 4, "521833": 4, "642289": 4, "489389": 4, "348": 4, "551979": 4, "273": 4, "162904": 4, "347": 4, "040148": 4, "326291": 4, "386": 4, "592227": 4, "317": 4, "4145": 4, "393": 4, "78332": 4, "426": 4, "145324": 4, "753921": 4, "321": [4, 17, 19, 46], "549098": 4, "940851": 4, "379": 4, "253401": 4, "338": 4, "095756": 4, "040606": 4, "403": 4, "22982": 4, "368": 4, "109211": 4, "04014799990546": 4, "75905900013095": 4, "loop": 4, "through": [4, 22, 25, 26, 31, 32, 35, 37, 39, 44], "isub": 4, "enumer": 4, "get": [4, 26, 35, 38, 39, 40], "name": [4, 10, 15, 20, 37, 39, 40, 41, 44], "stuff": 4, "proport": 4, "accur": 4, "count": [4, 22], "summari": [4, 41], "collaps": 4, "out_median_rt": 4, "squeez": 4, "axi": 4, "out_prop_accu": 4, "361": 4, "7079635": 4, "364": [4, 11], "45366275": 4, "547": 4, "10713075": 4, "611": 4, "45526175": 4, "600": 4, "073256": 4, "587": 4, "019756": 4, "535": 4, "04144725": 4, "533": 4, "250092": 4, "24263125": 4, "439": 4, "1580675": 4, "456": [4, 5], "3330375": 4, "631": 4, "1320265": 4, "446": 4, "95026625": 4, "556": 4, "038382": 4, "484": 4, "0763345": 4, "478": 4, "4887": 4, "443": 4, "5412585": 4, "472": 4, "331497": 4, "436": 4, "66653125": 4, "501": 4, "3115865": 4, "495": 4, "63977675": 4, "358365": 4, "520": 4, "97468575": 4, "973": 4, "144689": 4, 
"395": 4, "5098235": 4, "370": 4, "232933": 4, "489": 4, "59985525": 4, "560": 4, "08098575": 4, "455": 4, "6651645": 4, "500": 4, "22833625": 4, "662": 4, "9326765": 4, "643": 4, "0934405": 4, "488": 4, "56224425": 4, "474": 4, "7125945": 4, "482": 4, "1316255": 4, "543": 4, "0173155": 4, "713": 4, "4103845": 4, "894": 4, "0804795": 4, "485": 4, "48053725": 4, "6649235": 4, "464": 4, "47033775": 4, "517": 4, "368321": 4, "45884825": 4, "39610975": 4, "527": [4, 11], "5814465": 4, "4898155": 4, "477": 4, "23867575": 4, "72875575": 4, "94210575": 4, "578": 4, "6717025": 4, "681": 4, "7458885": 4, "804": 4, "780758": 4, "736862": 4, "569911": 4, "42846425": 4, "12191": 4, "98305085": 4, "96428571": 4, "91680961": 4, "88974359": 4, "97767857": 4, "9822995": 4, "95403439": 4, "92346939": 4, "97727273": 4, "99019608": 4, "97058824": 4, "97395994": 4, "68275862": 4, "83333333": 4, "89914021": 4, "89732143": 4, "92080745": 4, "73333333": 4, "94716042": 4, "91666667": 4, "95755518": 4, "95959184": 4, "94949495": 4, "88296296": 4, "94117647": 4, "90046296": 4, "92857143": 4, "96551724": 4, "875": 4, "91171329": 4, "98333333": 4, "94733656": 4, "87820513": 4, "94187987": 4, "95833333": 4, "96479592": 4, "94736842": 4, "90833333": 4, "94186047": 4, "97222222": 4, "95718391": 4, "barplot": [4, 12, 17, 22], "result": [4, 17, 22, 25, 44], "figur": [4, 11, 12, 16, 17, 21, 22, 26], "set_xtick": 4, "set_xticklabel": 4, "212": 4, "450": 4, "reaction": 4, "show": [4, 11, 16, 21, 30, 34, 39, 44], "spreadsheet": 4, "column_dict": 4, "particip": [4, 12, 17, 21, 44], "accvalid": 4, "accinvalid": 4, "rtvalid": 4, "rtinvalid": 4, "375cueingbehpi": 4, "index": [4, 26, 44], "983051": 4, "964286": 4, "707963": 4, "453663": 4, "000000": [4, 5], "107131": 4, "455262": 4, "916810": 4, "889744": 4, "977679": 4, "041447": 4, "982299": 4, "242631": 4, "158067": 4, "954034": 4, "600000": 4, "333038": 4, "132027": 4, "923469": 4, "977273": 4, "950266": 4, "990196": 4, "970588": 4, "076334": 4, 
"488700": 4, "973960": 4, "541259": 4, "682759": 4, "833333": 4, "666531": 4, "311586": 4, "899140": 4, "897321": 4, "639777": 4, "920807": 4, "733333": 4, "974686": 4, "947160": 4, "916667": 4, "509823": 4, "957555": 4, "599855": 4, "080986": 4, "959592": 4, "949495": 4, "665165": 4, "228336": 4, "882963": 4, "941176": 4, "932677": 4, "093440": 4, "900463": 4, "928571": 4, "562244": 4, "712594": 4, "965517": 4, "875000": 4, "131626": 4, "017316": 4, "911713": 4, "410385": 4, "080479": 4, "983333": 4, "900000": 4, "480537": 4, "664923": 4, "947337": 4, "878205": 4, "470338": 4, "941880": 4, "958333": 4, "458848": 4, "396110": 4, "964796": 4, "947368": 4, "581447": 4, "489816": 4, "908333": 4, "238676": 4, "728756": 4, "941860": 4, "800000": 4, "942106": 4, "671703": 4, "745889": 4, "972222": 4, "957184": 4, "428464": 4, "121910": 4, "155": [4, 7, 46], "03r__cueing_behaviour_analysis_winter2019": [4, 7, 46], "noteook": 5, "placehold": 5, "spectrogram": 5, "subject1101": 5, "subject1101_session1_recording_2019": 5, "subject1101_session2_recording_2019": 5, "subject1102": 5, "subject1102_session1_recording_2019": 5, "subject1102_session2_recording_2019": 5, "136": 5, "subject1103": 5, "subject1103_session1_recording_2019": 5, "45": [5, 11, 12, 34], "subject1103_session2_recording_2019": 5, "161": 5, "64": 5, "subject1104": 5, "subject1104_session1_recording_2019": 5, "subject1104_session2_recording_2019": 5, "187": 5, "subject1105": 5, "subject1105_session1_recording_2019": 5, "subject1105_session2_recording_2019": 5, "192": 5, "73": 5, "subject1106": 5, "subject1106_session1_recording_2019": 5, "subject1106_session2_recording_2019": 5, "134": 5, "subject1108": 5, "subject1108_session1_recording_2019": 5, "subject1108_session2_recording_2019": 5, "146": 5, "subject1109": 5, "subject1109_session1_recording_2019": 5, "subject1109_session2_recording_2019": 5, "176": 5, "subject1110": 5, "subject1110_session1_recording_2019": 5, "subject1110_session2_recording_2019": 5, 
"168": 5, "subject1202": 5, "subject1202_session1_recording_2019": 5, "subject1202_session2_recording_2019": 5, "173": 5, "subject1203": 5, "subject1203_session1_recording_2019": 5, "subject1203_session2_recording_2019": 5, "subject1205": 5, "subject1205_session1_recording_2019": 5, "subject1205_session2_recording_2019": 5, "226": 5, "83": 5, "subject1206": 5, "subject1206_session1_recording_2019": 5, "subject1206_session2_recording_2019": 5, "214": 5, "66": 5, "subject1209": 5, "subject1209_session1_recording_2019": 5, "subject1209_session2_recording_2019": 5, "186": 5, "subject1210": 5, "subject1210_session1_recording_2019": 5, "subject1210_session2_recording_2019": 5, "subject1211": 5, "subject1211_session1_recording_2019": 5, "subject1211_session2_recording_2019": 5, "139": 5, "subject1215": 5, "subject1215_session1_recording_2019": 5, "subject1215_session2_recording_2019": 5, "193": 5, "subject1301": 5, "subject1301_session1_recording_2019": 5, "subject1301_session2_recording_2019": 5, "197": [5, 21, 22], "subject1302": 5, "subject1302_session1_recording_2019": 5, "subject1302_session2_recording_2019": 5, "subject1313": 5, "subject1313_session1_recording_2019": 5, "subject1313_session2_recording_2019": 5, "subject1401": 5, "subject1401_session1_recording_2019": 5, "subject1401_session2_recording_2019": 5, "125": 5, "subject1402": 5, "subject1402_session1_recording_2019": 5, "subject1402_session2_recording_2019": 5, "158": 5, "65": [5, 34], "subject1403": 5, "subject1403_session1_recording_2019": 5, "subject1403_session2_recording_2019": 5, "172": 5, "subject1404": 5, "subject1404_session1_recording_2019": 5, "subject1404_session2_recording_2019": 5, "subject1405": 5, "subject1405_session1_recording_2019": 5, "subject1405_session2_recording_2019": 5, "162": 5, "subject1408": 5, "subject1408_session1_recording_2019": 5, "subject1408_session2_recording_2019": 5, "subject1410": 5, "subject1410_session1_recording_2019": 5, "subject1410_session2_recording_2019": 5, 
"subject1411": 5, "subject1411_session1_recording_2019": 5, "subject1411_session2_recording_2019": 5, "subject1412": 5, "subject1412_session1_recording_2019": 5, "subject1412_session2_recording_2019": 5, "subject1413": 5, "subject1413_session1_recording_2019": 5, "subject1413_session2_recording_2019": 5, "154": 5, "subject1414": 5, "subject1414_session1_recording_2019": 5, "subject1414_session2_recording_2019": 5, "185": 5, "81": 5, "subject1415": 5, "subject1415_session1_recording_2019": 5, "subject1415_session2_recording_2019": 5, "subject1416": 5, "subject1416_session1_recording_2019": 5, "subject1416_session2_recording_2019": 5, "linecollect": 5, "0x7fddc28c64f0": 5, "0x7fddc2be6e80": 5, "e": [5, 11, 12, 16, 17, 30, 31, 36, 37, 39, 41], "0x7fddc2d46910": 5, "0x7fddc2cf2e50": 5, "5735930197157026e": 5, "6421745679207323e": 5, "314184517949713e": 5, "789004769369718e": 5, "402571924638044e": 5, "201698855806472e": 5, "6436811298970808e": 5, "1465401250727375e": 5, "107123680349165e": 5, "1169419915324295e": 5, "49372710835747e": 5, "3529314385416065e": 5, "8760356184521736e": 5, "773350340044497e": 5, "113134995947907e": 5, "5571207459949488e": 5, "6291150675500723e": 5, "052232335321791e": 5, "6644221372822607e": 5, "438951097875039e": 5, "091042185483073e": 5, "298338355801429e": 5, "3604198590381474e": 5, "861633427481533e": 5, "204912953285817e": 5, "8344086913139518e": 5, "380242500968444e": 5, "805033297719282e": 5, "508128947253835e": 5, "907964859843707e": 5, "146716305783299e": 5, "5307351374032941e": 5, "3393198398387354e": 5, "901325e": 5, "690330e": 5, "568905e": 5, "833667e": 5, "037251e": 5, "979737e": 5, "567893e": 5, "242117e": 5, "128359e": 5, "818109e": 5, "090019e": 5, "279306e": 5, "325457e": 5, "013762e": 5, "086011e": 5, "407716e": 5, "168927e": 5, "815995e": 5, "073335e": 5, "857313e": 5, "134699e": 5, "757364e": 5, "088382e": 5, "201517e": 5, "006811e": 5, "503096e": 5, "543183e": 5, "085932e": 5, "615777e": 5, "436455e": 5, "354581e": 5, 
"901593e": 5, "637907e": 5, "495330e": 5, "265134e": 5, "017556e": 5, "448927e": 5, "578761e": 5, "682534e": 5, "322114e": 5, "775386e": 5, "063702e": 5, "500639e": 5, "042736e": 5, "907671e": 5, "643642e": 5, "746872e": 5, "666293e": 5, "699470e": 5, "944371e": 5, "345766e": 5, "096579e": 5, "654300e": 5, "463347e": 5, "302737e": 5, "439791e": 5, "426000e": 5, "088135e": 5, "177686e": 5, "517005e": 5, "242424": 5, "757576": 5, "991890e": 5, "036284e": 5, "094144e": 5, "057510e": 5, "482874e": 5, "635330e": 5, "867214e": 5, "719235e": 5, "394826e": 5, "324403e": 5, "411441e": 5, "411093e": 5, "355015e": 5, "519385e": 5, "270396e": 5, "232011e": 5, "146935e": 5, "747567e": 5, "757861e": 5, "344323e": 5, "591767e": 5, "802644e": 5, "011956e": 5, "111111e": 5, "051924e": 5, "359738e": 5, "878931e": 5, "096915e": 5, "289620e": 5, "966269e": 5, "976043e": 5, "762940e": 5, "639212e": 5, "390503e": 5, "273891e": 5, "085691e": 5, "734091e": 5, "751927e": 5, "127110e": 5, "042786e": 5, "155194e": 5, "554974e": 5, "215457e": 5, "061286e": 5, "132429e": 5, "957683e": 5, "976622e": 5, "777715e": 5, "521996e": 5, "196098e": 5, "447362e": 5, "356378e": 5, "992109e": 5, "659712e": 5, "917412e": 5, "278172e": 5, "632265e": 5, "465498e": 5, "835722e": 5, "643791e": 5, "051757e": 5, "010762e": 5, "687247e": 5, "318740e": 5, "037587e": 5, "195487e": 5, "292653e": 5, "176655e": 5, "576869e": 5, "575594e": 5, "139473e": 5, "728775e": 5, "017372e": 5, "806035e": 5, "168381e": 5, "039849e": 5, "892637e": 5, "928711e": 5, "549501e": 5, "594952e": 5, "863042e": 5, "441354e": 5, "789338e": 5, "213076e": 5, "099290e": 5, "131996e": 5, "197682e": 5, "769963e": 5, "486010e": 5, "910190e": 5, "456576e": 5, "193358e": 5, "444701e": 5, "199233e": 5, "492537e": 5, "519870e": 5, "015551e": 5, "978091e": 5, "160645e": 5, "092205e": 5, "236814e": 5, "335374e": 5, "298211e": 5, "066240e": 5, "196307e": 5, "035983e": 5, "293275e": 5, "487449e": 5, "570382e": 5, "986854e": 5, "506011e": 5, "287051e": 5, 
"194128e": 5, "055475e": 5, "599190e": 5, "352438e": 5, "126135e": 5, "308744e": 5, "366325e": 5, "285204e": 5, "row": [5, 44], "491": [5, 7, 46], "04r__cueing_group_analysis_winter2019": [5, 7, 46], "visual_cueing_python": [6, 28], "visual_cueing_jupyt": [6, 28], "900": [7, 34], "visual_cu": [7, 46], "visual_gonogo_python": [8, 29], "visual_gonogo": 9, "demonstr": [10, 11, 15, 16, 20, 21, 28], "initi": [10, 25, 37, 39, 41, 44], "stream": [10, 11, 15, 16, 20, 25, 37, 39, 41, 44, 45], "how": [10, 15, 20, 22, 30, 32, 39, 42, 44, 45], "generate_save_fn": [10, 15, 20, 37, 38, 39], "devic": [10, 25, 32, 35, 37, 38, 39, 41], "visualn170": 10, "defin": [10, 12, 15, 17, 20, 22, 26, 37, 38, 39, 40], "variabl": [10, 15, 20, 37, 39], "board_nam": [10, 15, 20, 37, 39], "muse2": [10, 15, 20, 39, 40, 44], "board": [10, 37, 38, 40, 44], "experiment_nam": [10, 35, 44], "visual_n170": [10, 14, 37, 38, 39, 46], "subject_id": [10, 15, 20, 38], "test": [10, 26, 34, 37, 39, 44], "session_nb": [10, 15, 20, 38], "record_dur": [10, 15, 20, 37, 39], "record": [10, 11, 16, 21, 35, 38, 40, 41], "durat": [10, 12, 15, 17, 20, 21, 37, 44], "save_fn": [10, 15, 20, 37, 38, 39], "eeg_devic": [10, 15, 20, 37, 39], "type": [10, 11, 12, 16, 17, 31, 44], "use_vr": [10, 39], "locat": [10, 26, 28, 41, 42, 45], "00x__n170_run_experi": [10, 14, 46], "respons": [11, 16, 21, 28, 30, 34, 41, 44], "imag": [11, 16, 40, 44], "face": [11, 12, 16, 17, 30, 34, 44], "hous": [11, 12, 16, 17, 30, 34, 44], "shown": [11, 38, 40, 44, 45], "rapid": [11, 16], "serial": [11, 16, 39, 40], "rsvp": [11, 16], "interaxon": [11, 16, 21, 37, 41], "headset": [11, 16, 21, 38, 39, 44], "2016": [11, 16, 21, 32, 37, 45], "model": [11, 16, 21, 44], "consist": [11, 16, 21, 34, 44], "six": [11, 16, 21], "block": [11, 16, 21, 39, 44], "continu": [11, 16, 21, 38, 44], "obtain": [11, 16, 21], "list": [11, 16, 21, 36, 37, 40, 44], "filenam": [11, 16, 21, 35, 37, 38, 39], "alreadi": [11, 16, 21, 37], "specifi": [11, 16, 21, 38, 39, 44], 
"thei": [11, 16, 21, 34, 35, 38, 39, 44, 45], "quickli": [11, 16, 21, 28, 44], "cloud": [11, 16, 21, 25, 37], "place": [11, 16, 21, 25, 32, 39, 41, 44, 45], "final": [11, 16, 21, 39, 44], "waveform": [11, 16], "note": [11, 12, 16, 17, 21, 22, 25, 37, 41, 44], "you": [11, 16, 21, 26, 35, 36, 37, 38, 39, 40, 41, 44], "local": [11, 16, 21, 26, 35, 36, 38, 44], "follow": [11, 16, 21, 22, 26, 28, 35, 37, 38, 39, 42, 43, 44], "cell": [11, 16, 21, 22, 37], "do": [11, 16, 21, 22, 35, 37, 38], "have": [11, 16, 21, 26, 28, 34, 35, 37, 38, 40, 41, 44], "n170_data_path": [11, 12], "eegnb_exampl": [11, 12, 16, 17, 21, 22], "muse2016_bfn": 11, "1ostfxzeqf36r5d": 11, "2auyw4dlnpj9e_fah": 11, "963ef67c": 11, "fe37": 11, "4f4e": 11, "a1c3": 11, "82d6d5ae3840": 11, "72m": [11, 21], "91m": 11, "9mb": 11, "78": 11, "3mb": 11, "subject0001": [11, 12, 16, 17, 21, 22, 36, 44], "recording_2022": 11, "fp1": 11, "fp2": 11, "76768": 11, "76767": 11, "299": 11, "871": 11, "76936": 11, "76935": 11, "300": [11, 44], "76780": 11, "76779": 11, "918": 11, "76744": 11, "76743": 11, "777": 11, "76912": 11, "76911": 11, "76816": 11, "76815": 11, "059": 11, "93280": 11, "93279": 11, "371": 11, "plot_psd": [11, 16, 21], "legaci": [11, 12, 16, 17, 21, 22], "new": [11, 12, 16, 17, 21, 22, 25, 26, 35, 37, 38, 39, 41, 44], "contain": [11, 12, 16, 17, 25, 26, 37, 40, 44], "timestamp": [11, 12, 16, 17], "stimulu": [11, 12, 16, 17, 21, 22, 31, 32, 34, 37, 39, 41], "around": [11, 12, 22, 25, 30, 31, 34], "5e": 11, "3740": 11, "919786096256688": 11, "3369": 11, "1702hous": 11, "1667": 11, "602": 11, "off": [11, 12, 17, 39, 40], "channel_ord": [11, 16], "reorder": 11, "ch_name": 11, "accord": [11, 16], "manual": [11, 16, 37], "adjust": [11, 16, 39, 41, 44], "set_ylim": [11, 16], "5e6": [11, 16], "tight_layout": [11, 16, 21], "01r__n170_viz": [11, 14, 46], "machin": [12, 17, 22, 38, 41, 44], "learn": [12, 17, 22, 41, 44], "algorithm": [12, 17, 30], "compar": [12, 17, 21, 34], "term": [12, 17], "exactli": [12, 
17], "same": [12, 17, 32, 34, 37, 39, 40, 44], "load_and_visu": [12, 17, 22], "seaborn": [12, 17, 22], "sn": [12, 17, 22], "vector": [12, 17, 22], "scikit": [12, 17, 22], "pyriemann": [12, 17, 22], "ml": [12, 17, 22], "sklearn": [12, 17, 22], "pipelin": [12, 17, 22], "make_pipelin": [12, 17, 22], "linear_model": [12, 17, 22], "logisticregress": [12, 17, 22], "preprocess": [12, 17, 22], "standardscal": [12, 17, 22], "discriminant_analysi": [12, 17, 22], "lineardiscriminantanalysi": [12, 17, 22], "lda": [12, 17, 22], "model_select": [12, 17, 22], "cross_val_scor": [12, 17, 22], "stratifiedshufflesplit": [12, 17, 22], "estim": [12, 17, 22], "erpcovari": [12, 17, 22], "xdawncovari": [12, 17, 22], "tangentspac": [12, 17, 22], "mdm": [12, 17, 22], "further": [12, 17, 22, 44], "descript": [12, 17, 22, 35, 39], "data_2017": [12, 16, 17, 21, 22], "30732": [12, 16, 17, 21, 22], "30731": [12, 16, 17, 21, 22], "043": [12, 16, 17, 21, 22], "30720": [12, 21, 22], "30719": [12, 21, 22], "996": [12, 21, 22], "measur": [12, 17, 21, 29, 34, 37, 44], "date": [12, 17, 21], "unknown": [12, 17, 21], "experiment": [12, 17, 21, 31, 41], "digit": [12, 17, 21, 38], "point": [12, 17, 21, 26, 44], "good": [12, 17, 21, 34, 39, 44], "misc": [12, 17], "bad": [12, 17, 21], "eog": [12, 17, 21], "Not": [12, 17, 21, 37], "ecg": [12, 17, 21], "256": [12, 17, 21], "highpass": [12, 17, 21], "lowpass": [12, 17, 21], "hh": [12, 17, 21], "mm": [12, 17, 21], "ss": [12, 17, 21], "75e": 12, "1174": 12, "003407155025551": 12, "1127": 12, "562hous": 12, "565": 12, "801": [12, 17], "clf": [12, 17, 22], "vect": [12, 17], "lr": [12, 17], "reglda": [12, 17, 22], "shrinkag": [12, 17, 22], "solver": [12, 17, 22], "eigen": [12, 17, 22], "erpcov": [12, 17], "oa": [12, 17], "xdawncov": [12, 17], "format": [12, 17, 26, 35, 38], "pick_typ": [12, 17, 22], "get_data": [12, 17, 21, 22], "1e6": [12, 17, 22], "y": [12, 17, 22], "cross": [12, 17, 22], "cv": [12, 17, 22], "n_split": [12, 17, 22], "test_siz": [12, 17, 22], 
"random_st": [12, 17, 22], "auc": [12, 17, 22], "try": [12, 22, 39, 44], "re": [12, 17, 22, 26, 44], "score": [12, 17, 22], "roc_auc": [12, 17, 22], "n_job": [12, 17, 22], "extend": [12, 17, 22, 41, 44], "except": [12, 22], "inst": [12, 17, 22], "figsiz": [12, 17, 21, 22], "xlim": [12, 17, 22], "despin": [12, 17, 22], "511": [12, 14, 46], "02r__n170_decod": [12, 14, 46], "visual_n170_python": [13, 30], "visual_n170_jupyt": [13, 30], "880": 14, "visualp300": 15, "visual_p300": [15, 19, 46], "start": [15, 20, 37, 40, 44], "00x__p300_run_experi": [15, 19, 46], "oddbal": [16, 31, 35, 39, 41], "paradigm": [16, 31, 41], "cat": [16, 17], "dog": [16, 17], "shwn": 16, "categor": 16, "respect": [16, 39, 40], "target": [16, 17, 28, 34], "high": [16, 32, 34, 41, 43], "low": [16, 22, 34, 41], "probabl": [16, 22, 34, 44], "occur": [16, 30, 31, 34], "p300_data_path": [16, 17], "1olcj": 16, "zsjqdnrsbsuasgbxowwdngwtvfc": 16, "4m": 16, "4mb": 16, "93": 16, "15_45_13": [16, 17], "16_03_08": [16, 17], "15_47_49": [16, 17], "15_58_30": [16, 17], "15_55_07": [16, 17], "15_51_07": [16, 17], "100e": [16, 17, 22], "1161": [16, 17], "5503875968992276": [16, 17], "2e6": 16, "982": [16, 19, 46], "01r__p300_viz": [16, 19, 46], "classif": [17, 22, 30], "xdawn": 17, "1143": 17, "959target": 17, "184": 17, "class": [17, 25, 39, 40], "85": 17, "02r__p300_decod": [17, 19, 46], "visual_p300_python": [18, 31], "visual_p300_jupyt": [18, 31], "visualssvep": 20, "visual_ssvep": [20, 24, 46], "00x__ssvep_run_experi": [20, 24, 46], "steadi": [21, 32, 34], "state": [21, 32, 34], "evok": [21, 32, 34], "potenti": [21, 29, 30, 31, 32, 34], "ssvep_data_path": [21, 22], "replace_ch_nam": [21, 22], "poz": [21, 22, 32, 34], "set_channel_typ": 21, "1zj9wx": 21, "yemjo7guguuu7sshcybfsr": 21, "fze": 21, "14m": 21, "128": 21, "100m": [21, 22, 34], "800m": [21, 22], "here": [21, 22, 26, 28, 37, 39, 41, 44], "becaus": [21, 22, 42], "so": [21, 22, 39, 44], "larg": [21, 22, 26, 30, 41], "difficult": [21, 22, 34], 
"separ": [21, 22, 25, 44], "538071065989844": [21, 22], "dure": [21, 29, 34, 44], "20hz": [21, 34], "30hz": [21, 34], "welch_param": 21, "dict": 21, "welch": 21, "n_fft": 21, "1028": 21, "n_per_seg": 21, "psd1": 21, "freq1": 21, "return_freq": 21, "psd2": 21, "freq2": 21, "log10": 21, "psd1_mean": 21, "psd1_std": 21, "psd2_mean": 21, "psd2_std": 21, "label": 21, "r": 21, "set_titl": 21, "set_ylabel": 21, "spectral": 21, "densiti": [21, 41], "set_xlim": 21, "set_xlabel": 21, "With": [21, 34, 45], "clearli": [21, 32], "distinct": 21, "correspond": [21, 39], "much": [21, 26, 32, 39, 44], "larger": [21, 28, 41], "electrod": [21, 22, 30, 32, 34, 39, 44], "still": [21, 26, 40, 41, 44], "visibl": 21, "016": 21, "logspac": 21, "layout": 21, "engin": 21, "tight": 21, "fix": 21, "set_layout_engin": 21, "onc": [21, 38, 39, 40, 44], "again": [21, 44], "710": [21, 24, 46], "01r__ssvep_viz": [21, 24, 46], "part": [22, 30, 38, 39, 44], "web": [22, 26], "binder": [22, 37], "collab": 22, "addit": [22, 26, 32, 37, 39, 40, 44], "remov": [22, 25, 44], "kept": 22, "covari": 22, "spatialfilt": 22, "csp": 22, "bank": [22, 34], "approach": [22, 25, 34, 44], "headband": [22, 30, 31, 32, 37, 41, 44], "alon": 22, "without": [22, 37, 39, 44], "extern": 22, "could": 22, "classifi": [22, 34], "concaten": 22, "extract": [22, 34], "onset": [22, 28, 34], "avoid": 22, "common": [22, 31, 40, 41], "muse_raw": 22, "drop_channel": 22, "raw_filt_30hz": 22, "copi": 22, "raw_filt_20hz": 22, "rename_channel": 22, "lambda": 22, "_30hz": 22, "_20hz": 22, "raw_al": 22, "add_channel": 22, "force_update_info": 22, "epochs_al": 22, "stim_30hz": 22, "stim_20hz": 22, "base": [22, 25, 26, 32, 38, 39, 41, 42], "spatial": [22, 28], "pattern": [22, 30, 34], "regular": 22, "linear": 22, "discriminat": 22, "cov": 22, "tangent": 22, "space": 22, "map": [22, 44], "reliabl": [22, 28], "riemannian": 22, "geometri": 22, "simpl": [22, 39, 44, 45], "spacem": 22, "procedur": [22, 44], "beforehand": 22, "evalu": [22, 30, 32, 
44], "done": [22, 37], "area": [22, 30, 32], "under": [22, 39, 41, 42], "curv": 22, "metric": 22, "best": [22, 41, 44], "binari": 22, "unbalanc": 22, "problem": [22, 41], "mai": [22, 26, 31, 34, 37, 44], "take": [22, 26, 34, 38, 39, 41], "while": [22, 34, 44], "complet": [22, 26, 44], "log": 22, "827": [22, 24, 46], "02r__ssvep_decod": [22, 24, 46], "visual_ssvep_python": [23, 32], "visual_ssvep_jupyt": [23, 32], "538": 24, "page": [25, 26, 37, 39, 40, 41], "updat": [25, 26, 35, 39, 44, 45], "version": [25, 26, 37, 41], "seri": [25, 39, 41, 44], "includ": [25, 37, 41, 44, 45], "sever": [25, 30, 44], "major": [25, 37, 41, 44], "api": [25, 35, 39, 41], "compat": [25, 37, 39], "These": [25, 34, 37, 39, 40, 44, 45], "were": [25, 34, 39, 43], "introduc": 25, "openbci": [25, 37, 39, 41], "ntx": 25, "challeng": [25, 44], "2020": 25, "support": [25, 32, 39, 41, 44], "brainflow": [25, 40, 41, 44], "abstract": [25, 40], "cover": 25, "lsl": [25, 37, 41, 45], "subprocess": 25, "call": [25, 39, 40, 44], "concurr": [25, 41], "psychopi": [25, 41], "muselsl": [25, 40, 44], "put": [25, 38, 39, 42, 44], "insid": [25, 39], "requir": [25, 26, 41, 44], "user": [25, 26, 32, 35, 37, 38, 39, 41], "document": [25, 38, 39, 44], "built": [25, 26, 39], "host": [25, 26], "gh": [25, 26], "cleanup": 25, "instal": [25, 26, 39, 40, 41], "setup": [25, 37, 39, 41, 42, 44], "instruct": [25, 32, 37, 39, 40, 41, 43], "repo": [25, 26, 37, 41], "storag": [25, 44], "implement": [25, 41], "kyle": [25, 41], "mathewson": [25, 41], "ad": [25, 34, 41, 45], "wa": [25, 26, 34, 35, 38, 39, 41, 43, 44], "port": [25, 26, 32, 39, 41, 45], "develop": [25, 40, 41, 44], "orient": [25, 28], "princip": [25, 37, 44], "neurobrit": 25, "brainmod": 25, "program": [25, 37, 39, 44], "neurotechx": [26, 37, 39, 41, 44], "github": [26, 37, 41, 44], "combin": [26, 34, 40, 41, 42], "plain": 26, "text": [26, 44], "rst": [26, 42], "md": [26, 42], "branch": 26, "usual": 26, "fashion": 26, "There": [26, 35, 37, 39, 43, 44], "main": 26, 
"variou": [26, 35, 37, 39, 40, 41, 42, 44], "folder": [26, 35, 38, 39, 42, 44], "webpag": 26, "content": [26, 44], "wide": 26, "excel": [26, 41, 43, 44], "nilearn": 26, "suit": [26, 34], "mixtur": 26, "convert": [26, 38], "line": [26, 40, 41, 44], "In": [26, 30, 31, 32, 37, 40, 41, 42, 43, 44, 45], "link": [26, 39, 42, 44], "bottom": [26, 44], "command": [26, 37, 41, 44], "actual": [26, 44], "lead": [26, 41], "incomplet": 26, "side": [26, 28], "conceiv": 26, "idea": [26, 41, 44], "runnabl": 26, "those": 26, "being": [26, 37, 38, 39, 40, 44], "At": [26, 44], "level": [26, 28, 31, 41, 44], "case": [26, 38, 39, 40, 44], "now": [26, 39, 40, 41, 44], "switch": [26, 40], "maintain": 26, "when": [26, 32, 34, 37, 38, 39, 40, 44], "compil": 26, "better": [26, 28, 41, 44], "sustain": [26, 29, 34], "view": [26, 30, 32, 36, 39], "control": [26, 29, 34, 39, 40], "sinc": [26, 34], "multipl": [26, 31, 38, 39, 44], "contribut": 26, "hairi": 26, "git": [26, 37, 44], "linux": [26, 40, 41], "work": [26, 32, 37, 44], "mac": [26, 39, 40, 41], "exist": [26, 37], "environ": [26, 36, 37, 41], "txt": 26, "termin": [26, 35, 39, 40], "open": [26, 35, 37, 39, 40, 41, 44], "activ": [26, 30, 32, 34, 35, 37, 39, 44], "edit": 26, "period": [26, 38, 44], "inspect": 26, "chang": [26, 28, 41], "cd": [26, 37, 44], "html": [26, 37, 42, 44], "keep": [26, 41, 44], "server": [26, 37, 39, 44], "render": 26, "8001": 26, "browser": 26, "navig": [26, 40], "abov": [26, 35, 37, 39, 40, 44], "localhost": 26, "happi": [26, 44], "commit": 26, "push": 26, "overal": 26, "structur": [26, 37, 38, 39], "tabl": [26, 44], "tree": 26, "makefil": 26, "websit": [26, 43, 44], "individu": [26, 38], "rather": [26, 34], "than": [26, 28, 34, 37, 41, 44], "entir": 26, "process": [26, 31, 39, 40, 41, 44], "current": [26, 35, 39, 41, 44], "approxim": [26, 44], "ellicit": 28, "central": [28, 41], "indic": [28, 29, 34], "upcom": [28, 44], "perfectli": 28, "predict": 28, "grate": [28, 34], "vertic": 28, "horizont": 28, "below": [28, 
35, 37, 38, 39, 40, 41, 44], "neural": 28, "cognit": [28, 31, 37, 41, 44], "empir": 28, "phenomena": [28, 41], "elicit": [28, 30, 31], "validli": 28, "cu": 28, "ellict": 28, "invalidli": 28, "more": [28, 31, 32, 34, 35, 37, 39, 40, 41, 44, 45], "identifi": [28, 34], "oscil": 28, "power": 28, "later": [28, 30, 34, 39], "preceed": 28, "becom": 28, "smaller": 28, "contraler": 28, "orphan": [28, 29, 30, 31, 32], "design": [29, 34], "investig": [29, 34], "relat": [29, 30, 31, 34, 39, 41], "inhibitori": [29, 34], "attent": [29, 31, 34], "rapidli": [29, 34], "sequenc": [29, 34, 44], "circl": [29, 34], "squar": [29, 34], "ask": [29, 34, 37, 39, 44], "press": [29, 34, 39, 40, 44], "spacebar": [29, 34], "whether": [29, 34], "neg": [30, 34], "scrambl": 30, "other": [30, 35, 40, 41, 44], "bodi": 30, "hand": [30, 45], "170m": 30, "percept": [30, 34], "easili": [30, 37, 41], "posterior": 30, "t5": 30, "t6": 30, "frontal": 30, "profil": 30, "human": [30, 44], "anim": 30, "strongest": 30, "strength": 30, "doe": [30, 40, 44], "seem": [30, 34], "influenc": 30, "familiar": [30, 37], "although": [30, 32, 34, 45], "consensu": 30, "specif": [30, 34, 37, 38, 39, 44], "research": [30, 41, 44], "believ": [30, 31], "fusiform": 30, "brain": [30, 32], "similar": [30, 39, 43], "involv": [30, 39, 44], "encod": 30, "holist": 30, "represent": [30, 38], "nose": [30, 44], "mouth": 30, "arrang": 30, "appropri": [30, 35, 39, 40], "attempt": [30, 31], "stimuli": [30, 32, 34, 44], "tempor": [30, 32, 34], "well": [30, 37, 41, 44], "ll": [30, 45], "abl": [30, 31, 34, 38, 44], "emerg": 30, "few": [30, 42, 44], "dozen": 30, "comput": [30, 37, 39, 40, 41, 44], "interfac": [30, 40, 41], "300m": [31, 34], "perceiv": [31, 34], "novel": 31, "unexpect": 31, "commonli": 31, "subtyp": 31, "rare": 31, "amidst": 31, "background": [31, 40, 44], "anoth": [31, 44], "interestingli": 31, "sensori": 31, "modal": 31, "g": [31, 36, 39, 41, 44], "odditori": 31, "somatosensori": 31, "signatur": [31, 44], "higher": 31, 
"consciou": 31, "repetit": 32, "natur": [32, 41], "produc": [32, 34], "flash": [32, 44], "75hz": 32, "occipit": 32, "like": [32, 35, 38, 39, 40, 41, 44], "due": [32, 41], "perceptu": 32, "recreat": 32, "primari": [32, 34], "cortex": [32, 34], "often": [32, 41], "bci": 32, "applic": [32, 39, 41], "its": [32, 34, 39, 45], "eas": [32, 45], "amount": 32, "inform": [32, 37, 39, 40, 41], "commun": [32, 37, 41], "resolut": 32, "extra": [32, 34, 37], "default": [32, 35, 36, 39, 40, 44], "seen": [32, 39], "directli": [32, 41, 42, 44], "connect": [32, 37, 39, 44, 45], "microusb": [32, 45], "charg": [32, 45], "build": 32, "For": [32, 37, 38, 39, 41, 44], "back": [32, 44], "skull": 32, "secur": [32, 44], "bandana": 32, "hat": [32, 45], "spike": 34, "implic": 34, "decis": 34, "alexandr": [34, 41], "barach": [34, 41], "item": [34, 41], "interspers": 34, "about": [34, 37, 39, 41, 44], "sensor": 34, "aren": 34, "ideal": [34, 44], "attain": 34, "accuraci": [34, 43], "hubert": [34, 41], "pictur": [34, 38, 40, 44, 45], "signific": 34, "closest": 34, "region": 34, "remark": 34, "isol": 34, "hear": [34, 44], "tone": 34, "who": [34, 37], "45hz": 34, "40hz": 34, "770h": 34, "carrier": 34, "correspondingli": 34, "complex": 34, "notic": 34, "begin": [34, 39, 40, 44], "90m": 34, "field": [34, 38], "reveal": 34, "contralater": 34, "anterior": 34, "howev": [34, 35, 38, 39, 40, 41, 44], "littl": 34, "delai": 34, "normal": [34, 38], "less": [34, 41], "distinguish": 34, "closer": 34, "center": 34, "superior": 34, "unpredict": 34, "independ": 34, "ran": 35, "compress": [35, 41], "tool": [35, 37, 39, 44], "detail": [35, 39, 41], "prompt": [35, 39, 44], "enter": [35, 38, 39, 44], "runzip": 35, "your": [35, 36, 37, 38, 39, 40, 41, 45], "desir": [35, 39, 44], "desktop": [35, 37, 39, 44], "_": [35, 39, 40, 44], "day_month_year_hour": [35, 44], "_zip": 35, "possibl": [35, 39, 40, 41, 44], "ex": [35, 39], "subfold": 35, "within": [35, 38, 39, 44], "ip": [35, 36, 39, 44], "bypass": [35, 37, 39], 
"interact": [35, 39, 44], "input": [35, 39], "pleas": [35, 37, 39, 40, 41, 44], "would": [35, 38, 39, 44], "codepros": [35, 39], "auditori": [35, 39, 41], "ssaep": [35, 39, 41], "orig": [35, 39], "diaconescu": [35, 39], "allow": [35, 37, 39, 41, 45], "agnost": [35, 39], "As": [35, 37, 39, 44], "popul": [35, 39], "local_ntc": 35, "temp": 35, "previous": [35, 37, 39, 41], "chosen": 35, "runexp": [36, 39, 44], "By": 36, "session0001": 36, "recording_2020": 36, "oper": [37, 40, 41], "system": [37, 39, 40, 44], "particularli": [37, 43, 44], "highli": 37, "recommend": [37, 39, 44, 45], "miniconda": [37, 41], "distribut": 37, "prefer": [37, 44], "virtualenv": 37, "venv": 37, "conda": [37, 44], "manag": [37, 44], "equival": 37, "usag": [37, 41, 44], "relev": [37, 44], "purpos": [37, 44], "neurosci": [37, 41, 44], "consum": [37, 39, 41], "grade": [37, 41], "secondari": 37, "complementari": 37, "newli": 37, "either": [37, 44], "public": 37, "simpler": 37, "hardwar": [37, 41, 44, 45], "softwar": [37, 41, 44], "third": [37, 44], "parti": [37, 44], "deal": 37, "establish": [37, 40], "deliveri": [37, 41], "laptop": [37, 41], "nativ": [37, 39, 44], "bluetooth": [37, 44], "free": [37, 41], "temporari": 37, "googlecolab": 37, "virtual": [37, 41], "don": 37, "grab": [37, 44], "latest": [37, 44], "doc": [37, 42, 44], "en": [37, 44], "pip": [37, 44], "wxpython": [37, 44], "clone": [37, 44], "maco": [37, 39], "mkdir": 37, "eegnb_dir": 37, "python3": 37, "env": 37, "bat": 37, "bin": 37, "kernel": 37, "necessari": [37, 38, 39, 40, 41, 44], "land": 37, "ipykernel": 37, "choic": [37, 44], "util": [37, 38, 44], "cyton": [37, 38, 39, 41, 44], "999": 37, "british": 37, "across": 37, "driver": [37, 41], "protocol": [37, 40, 41], "unfortun": 37, "cannot": [37, 44], "purchas": 37, "usb": [37, 39, 41], "special": 37, "reli": 37, "pygatt": 37, "gatt": 37, "might": [37, 39, 40, 44], "stabl": 37, "bgapi": 37, "option": [37, 40, 44], "need": [37, 38, 39, 40, 41, 42, 43, 44], "caus": 37, "pywin32": 
37, "nb_conda": 37, "tracker": 37, "question": [37, 41, 44], "project": [37, 41, 44], "follw": 37, "messag": 37, "know": [38, 44], "integr": 38, "hidden": 38, "outlin": 38, "c": [38, 44], "user_nam": 38, "automat": [38, 39, 40, 44, 45], "want": 38, "must": [38, 40, 44, 45], "new_dir": 38, "jadin": [38, 41], "eeg_notebooks_data": 38, "subdirectori": 38, "form": [38, 44], "file_nam": 38, "explain": 38, "refer": [38, 44], "own": 38, "integ": [38, 39], "subjectxxxx": 38, "xxxx": 38, "four": 38, "five": 38, "belong": 38, "break": [38, 39, 44], "consecut": [38, 39], "constitut": 38, "recording_date_tim": 38, "easiest": [39, 44], "ed": [39, 44], "eegdevic": 39, "string": 39, "ma": 39, "macaddr": 39, "address": [39, 40, 44], "ganglion": [39, 41, 44], "rd": 39, "recdur": 39, "outfnam": 39, "match": 39, "blank": 39, "welcom": [39, 41, 44], "daisi": [39, 41], "tec": [39, 41], "unicorn": [39, 41], "brainbit": [39, 41], "notion": [39, 41], "synthet": 39, "ensur": [39, 44], "proper": [39, 44], "dongl": [39, 41, 44], "wifi": [39, 40], "shield": [39, 40], "long": [39, 44, 45], "per": 39, "peopl": 39, "fatigu": 39, "difficulti": 39, "length": 39, "sit": 39, "down": 39, "return": 39, "last": [39, 44], "given": 39, "gui": [39, 40], "conform": 39, "otherwis": [39, 44], "handl": 39, "via": [39, 44], "run_notebook": 39, "debug": 39, "togeth": [39, 41, 42], "monitor": 39, "modif": 39, "haptic": 39, "feedback": 39, "oculu": 39, "rift": 39, "meta": 39, "quest": 39, "video": [39, 41, 43], "card": 39, "argument": 39, "enabl": [39, 41], "launch": [39, 44], "app": 39, "cli": 39, "turn": [39, 40, 44], "sure": [39, 40, 44, 45], "along": [39, 44], "inact": 39, "beta": [39, 41], "doubl": [39, 44], "tap": 39, "fingertip": 39, "passthrough": 39, "onto": 39, "wear": 39, "top": 39, "touch": [39, 41, 44], "scalp": 39, "ok": [39, 44], "strap": 39, "quick": 39, "dashboard": 39, "air": 39, "cabl": 39, "menu": [39, 40], "finish": [39, 44], "gamepad": 39, "viwer": 39, "verifi": [39, 40, 44], "receiv": [39, 
44], "eg": 39, "prepar": 39, "def": 39, "self": 39, "int": 39, "successfulli": 40, "properli": [40, 44], "exact": 40, "vari": 40, "consult": [40, 44], "rel": [40, 41], "easi": [40, 44], "behind": 40, "lot": [40, 43], "heavi": 40, "lift": 40, "troubleshoot": [40, 41], "tip": 40, "encount": 40, "report": [40, 41], "issu": [40, 41, 44], "plug": 40, "someth": [40, 41, 44], "button": [40, 44], "addr": 40, "discov": 40, "notion1": 40, "notion2": 40, "network": 40, "readabl": 40, "consol": 40, "firewal": 40, "sound": 40, "osc": 40, "wrong": [40, 44], "happen": 40, "check": [40, 41, 43], "serial_port": 40, "com7": 40, "determin": 40, "democrat": 41, "classic": 41, "analys": 41, "quit": 41, "primarili": 41, "taylor": 41, "budget": [41, 44], "goal": 41, "neurotechnologi": 41, "access": [41, 44, 45], "afford": [41, 45], "scalabl": 41, "intro": 41, "talk": 41, "formerli": 41, "jg": [41, 44], "brainhack": 41, "ontario": 41, "tutori": 41, "come": [41, 45], "soon": 41, "known": 41, "renam": 41, "underw": 41, "v0": 41, "old": 41, "even": 41, "older": 41, "convent": [41, 44], "lab": [41, 43], "typic": 41, "dedic": 41, "technician": 41, "equip": 41, "price": 41, "tag": 41, "hundr": [41, 44], "thousand": 41, "dollar": 41, "limit": [41, 45], "acquisit": 41, "recent": 41, "year": 41, "technologi": 41, "mani": [41, 44], "conduct": 41, "person": 41, "cheap": 41, "minimum": [41, 44], "cost": 41, "dramat": 41, "educ": 41, "univers": [41, 43, 44], "school": 41, "ambiti": 41, "scale": 41, "clinic": 41, "naturalist": 41, "think": 41, "core": [41, 44], "aim": 41, "critic": 41, "glue": 41, "wireless": 41, "lock": 41, "grow": 41, "modifi": [41, 44], "statist": [41, 43], "real": 41, "stop": 41, "shop": 41, "discuss": [41, 44], "social": 41, "scientif": 41, "technolog": 41, "context": 41, "trajectori": 41, "feel": 41, "forthcom": 41, "paper": 41, "changelog": 41, "previou": [41, 44], "solut": 41, "hacker": 41, "ininiti": 41, "groundwork": 41, "john": 41, "griffith": 41, "contributor": 41, 
"banvil": 41, "dano": 41, "morrison": 41, "ben": 41, "shapiro": 41, "amanda": 41, "easson": 41, "tredup": 41, "erik": 41, "bj\u00e4reholt": 41, "thank": [41, 44], "andrei": 41, "parfenov": 41, "u": [41, 44], "expand": 41, "scaffold": 41, "encourag": 41, "mail": 41, "utoronto": 41, "ca": 41, "ping": 41, "discord": 41, "slack": 41, "bluemus": [41, 44], "bled112": 41, "bug": 41, "neuros": [41, 44], "introprompt": 41, "flag": 41, "custom": [41, 44], "realiti": 41, "head": [41, 44, 45], "displai": [41, 44], "prerequisit": 41, "vr": 41, "c1": 41, "p1": [41, 44], "unvalid": 41, "n100": 41, "p200": 41, "On": 41, "reset": 41, "technic": 41, "introduct": [41, 44], "fit": 41, "qualiti": 41, "water": 41, "ntc": 41, "tl": 41, "dr": 41, "sign": 41, "consent": 41, "what": 41, "upload": 41, "url": 41, "newfangl": 41, "knowledg": 41, "faq": 41, "gotcha": 41, "my": 41, "comment": 42, "written": 42, "readthedoc": 42, "configur": [42, 44], "particular": 42, "nbsphinx": 42, "addition": [42, 44], "krigolson": 43, "victoria": 43, "bc": 43, "worth": 43, "latenc": 43, "jitter": 43, "40m": 43, "20m": 43, "resampl": 43, "n200": 43, "reward": 43, "tradit": 43, "neurotech": 44, "ntcsp1": 44, "intern": 44, "citizen": 44, "scienc": 44, "studi": 44, "toronto": 44, "camh": 44, "kcni": 44, "collabor": 44, "team": [44, 45], "opportun": 44, "comm": 44, "simpli": 44, "brows": 44, "getter": 44, "practic": 44, "fun": 44, "tri": 44, "fairli": 44, "succinct": 44, "whilst": 44, "sens": 44, "duplic": 44, "essenti": 44, "watch": 44, "short": 44, "overview": 44, "big": 44, "fulli": 44, "live": 44, "demo": 44, "forth": 44, "bit": 44, "carefulli": 44, "review": 44, "interest": 44, "introductori": 44, "lectur": 44, "mandatori": 44, "regist": 44, "give": 44, "gtec": 44, "zipper": 44, "anaconda": 44, "checker": 44, "checksigqu": 44, "repeat": 44, "ten": 44, "gd": 44, "ch": 44, "phone": 44, "tablet": 44, "store": 44, "share": 44, "outcom": 44, "afteth": 44, "fill": 44, "unlimit": 44, "entri": 44, "small": [44, 45], 
"quiz": 44, "did": 44, "skip": 44, "though": 44, "correctli": 44, "move": 44, "reason": 44, "reach": 44, "resolv": 44, "guid": 44, "describ": 44, "1a": 44, "click": 44, "taken": 44, "1b": 44, "survei": 44, "privaci": 44, "contact": 44, "1c": 44, "comprehens": 44, "anonym": 44, "statement": 44, "benefit": 44, "risk": 44, "thoroughli": 44, "quizz": 44, "1d": 44, "proce": 44, "ye": 44, "answer": 44, "screen": 44, "refresh": 44, "memori": 44, "unansw": 44, "appoint": 44, "1f": 44, "mous": 44, "trackpad": 44, "doesn": 44, "beauti": 44, "sent": 44, "email": 44, "uniqu": 44, "inbox": 44, "anyon": 44, "infinit": 44, "asid": 44, "rest": 44, "everyon": 44, "denot": 44, "regard": 44, "ag": 44, "sex": 44, "neurolog": 44, "rins": 44, "strongli": 44, "minconda": 44, "immedi": 44, "strean": 44, "bled": 44, "three": 44, "muse2_bfn": 44, "muse2_bfb": 44, "light": 44, "gone": 44, "solid": 44, "gettingstart": 44, "cytong": 44, "gangliong": 44, "ftdi": 44, "troubleshootingland": 44, "ultracortex": 44, "addon": 44, "headwear": 44, "markiv": 44, "kit": 44, "ones": 44, "accept": 44, "mydevic": 44, "stand": 44, "abort": 44, "achiev": 44, "foremost": 44, "breath": 44, "gentli": 44, "movement": 44, "throughout": 44, "especi": 44, "substanti": 44, "hour": 44, "suffici": 44, "interrupt": 44, "focu": 44, "distract": 44, "quiet": 44, "room": 44, "offic": 44, "bedroom": 44, "grayscal": 44, "respond": 44, "differenti": 44, "cours": 44, "sometim": 44, "wave": 44, "breather": 44, "maxim": 44, "ceas": 44, "success": 44, "conveni": 44, "checkdir": 44, "yourself": 44, "eeglab": 44, "muses_bfn": 44, "recording_2021": 44, "congratul": 44, "bonu": 44, "mobil": 44, "extrem": 44, "tell": 44, "d": 44, "love": 44, "twice": 44, "rate": 44, "won": 44, "decent": 44, "indend": 44, "persist": 44, "wonder": 45, "suffer": 45, "inflex": 45, "fortun": 45, "partial": 45, "overcom": 45, "sale": 45, "anywher": 45, "made": 45, "construct": 45, "pretti": 45, "kind": 45, "male": 45, "wire": 45, "enough": 45, "inset": 45, 
"connector": 45, "stai": 45, "scarf": 45, "viewer": 45, "622": 46}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"all": [0, 27], "notebook": [0, 27, 39], "exampl": [0, 27, 38], "comput": [1, 2, 3, 5, 7, 9, 14, 19, 24, 46], "time": [1, 7, 9, 14, 19, 24, 46], "cue": [2, 3, 4, 5, 28], "singl": 2, "subject": [2, 3, 5, 39], "analysi": [2, 3, 4, 5, 43], "setup": [2, 3, 4, 5, 10, 11, 12, 15, 16, 17, 20, 22, 43], "load": [2, 3, 5, 11, 12, 16, 17, 22, 38], "data": [2, 3, 4, 5, 11, 12, 16, 17, 22, 33, 35, 36, 38, 44, 45], "visual": [2, 11, 16, 21, 28, 29, 30, 31, 32, 34, 36, 44], "power": [2, 3, 5, 11, 16, 21], "spectrum": [2, 11, 16, 21], "spectral": 2, "densiti": 2, "filter": [2, 11], "epoch": [2, 5, 11, 12, 16, 17, 21, 22], "spectrogram": [2, 21], "now": 2, "we": 2, "plot": [2, 12], "differ": 2, "target": 2, "group": [3, 5], "download": 3, "mne": [3, 5], "object": 3, "combin": [3, 5], "t": [3, 5], "test": [3, 5], "save": [3, 5, 38], "averag": [3, 5, 11, 16], "ipsi": [3, 5], "contra": [3, 5], "spectra": [3, 5], "behaviour": 4, "winter": [4, 5], "2019": [4, 5], "analyz": [4, 33], "mat": 4, "accuraci": 4, "rt": 4, "put": 5, "n170": [10, 11, 12, 30, 34, 44], "run": [10, 12, 15, 20, 36, 39, 44], "experi": [10, 15, 20, 34, 36, 39, 41, 44], "decod": [12, 17, 22], "filteri": [12, 16, 17], "classif": 12, "result": 12, "p300": [15, 16, 17, 31, 34], "initi": [15, 20, 40], "eeg": [15, 20, 26, 40, 41, 42, 44], "devic": [15, 20, 40, 44], "classfic": 17, "ssvep": [20, 21, 22, 32, 34], "stimuli": 21, "specif": 21, "psd": 21, "code": 25, "changelog": 25, "0": 25, "2": [25, 40, 44], "x": 25, "1": [25, 40, 44], "expi": [26, 41, 42, 44], "document": [26, 41, 42], "develop": 26, "note": 26, "build": 26, "doc": 26, "site": 26, "misc": [26, 41], "go": [29, 34], "No": [29, 34], "avail": 34, "oddbal": 34, "paradigm": 34, "old": 34, "ssaep": 34, "c1": 34, "p1": 34, "auditori": 34, "unvalid": 34, "other": [34, 39], "phenomena": 34, "n100": 34, "p200": 34, "On": [34, 44], "task": 
34, "beta": 34, "alpha": 34, "reset": 34, "zip": [35, 44], "command": [35, 39], "line": [35, 39], "interfac": [35, 39], "us": [35, 39, 40, 43, 44, 45], "introprompt": [35, 39], "flag": [35, 39], "frequent": 36, "ask": 36, "question": 36, "how": 36, "do": [36, 44], "i": [36, 44], "an": [36, 40, 45], "live": 36, "stream": [36, 40], "check": [36, 44], "record": [36, 37, 39, 44], "qualiti": [36, 43, 44], "forgot": 36, "name": [36, 38], "my": [36, 44], "conda": 36, "env": 36, "where": [36, 44], "find": [36, 40], "correct": 36, "mac": [36, 37], "port": [36, 40, 44], "openbci": [36, 40, 44], "instal": [37, 44], "python": [37, 44], "librari": 37, "muse": [37, 40, 43, 44, 45], "requir": 37, "window": [37, 38, 40, 44], "bluemus": [37, 40], "bled112": 37, "dongl": [37, 40], "linux": [37, 38], "issu": 37, "common": 37, "problem": 37, "bug": 37, "report": 37, "locat": [38, 44], "default": 38, "directori": 38, "10": 38, "maco": 38, "chang": 38, "convent": 38, "board": 39, "select": 39, "durat": 39, "id": 39, "session": 39, "number": 39, "jupyt": 39, "custom": 39, "script": 39, "virtual": 39, "realiti": 39, "head": 39, "up": [39, 44], "displai": 39, "featur": 39, "ad": 39, "futur": 39, "releas": 39, "prerequisit": 39, "step": [39, 44], "vr": 39, "can": 39, "have": 39, "too": 39, "support": 40, "interaxon": 40, "ganglion": 40, "cyton": 40, "daisi": 40, "neuros": 40, "notion": 40, "version": [40, 44], "connect": 40, "brainbit": 40, "headband": [40, 43], "g": 40, "tec": 40, "unicorn": 40, "usb": 40, "overview": 41, "acknowledg": 41, "contribut": 41, "contact": 41, "get": [41, 44, 45], "start": 41, "about": [42, 43], "page": [42, 44], "technic": 43, "inform": [43, 44], "usag": 43, "introduct": 43, "fit": 43, "signal": [43, 44], "tutori": 43, "adjust": 43, "better": 43, "water": 43, "detail": [43, 44], "relat": 43, "erp": 43, "ntc": 44, "phase": 44, "instruct": 44, "thi": 44, "video": 44, "summari": 44, "tl": 44, "dr": 44, "full": 44, "sign": 44, "provid": 44, "consent": 44, "your": 
44, "e": 44, "mail": 44, "3": 44, "miniconda": 44, "4": 44, "set": 44, "environ": 44, "5": 44, "user": 44, "6": 44, "readi": 44, "what": 44, "7": 44, "after": 44, "take": 44, "look": 44, "": 44, "compress": 44, "8": 44, "upload": 44, "file": 44, "person": 44, "url": 44, "9": 44, "make": 44, "newfangl": 44, "knowledg": 44, "faq": 44, "troubleshoot": 44, "gotcha": 44, "ar": 44, "driver": 44, "extra": 45, "electrod": 45, "The": 45, "attach": 45, "from": 45}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1, "sphinx": 58}, "alltitles": {"All Notebook Examples": [[0, "all-notebook-examples"], [27, "all-notebook-examples"]], "Computation times": [[1, "computation-times"], [7, "computation-times"], [9, "computation-times"], [14, "computation-times"], [19, "computation-times"], [24, "computation-times"], [46, "computation-times"]], "Cueing Single Subject Analysis": [[2, "cueing-single-subject-analysis"]], "Setup": [[2, "setup"], [3, "setup"], [4, "setup"], [5, "setup"], [10, "setup"], [11, "setup"], [12, "setup"], [15, "setup"], [16, "setup"], [17, "setup"], [20, "setup"], [22, "setup"]], "Load Data": [[2, "load-data"], [11, "load-data"], [12, "load-data"], [17, "load-data"], [22, "load-data"]], "Visualize the power spectrum": [[2, "visualize-the-power-spectrum"], [11, "visualize-the-power-spectrum"], [16, "visualize-the-power-spectrum"], [21, "visualize-the-power-spectrum"]], "Power Spectral Density": [[2, "power-spectral-density"]], "Filtering": [[2, "filtering"], [11, "filtering"]], "Epoching": [[2, "epoching"], [11, "epoching"], [12, "epoching"], [16, "epoching"], [17, "epoching"], [21, "epoching"], [22, "epoching"]], "Spectrogram": [[2, "spectrogram"], [21, "spectrogram"]], "Now we 
compute and plot the differences": [[2, "now-we-compute-and-plot-the-differences"]], "Target Epoching": [[2, "target-epoching"]], "Cueing Group Analysis": [[3, "cueing-group-analysis"]], "Download the data": [[3, "download-the-data"]], "Load data into MNE objects": [[3, "load-data-into-mne-objects"]], "Combine subjects": [[3, "combine-subjects"], [5, "combine-subjects"]], "Compute t test": [[3, "compute-t-test"], [5, "compute-t-test"]], "Save average powers ipsi and contra": [[3, "save-average-powers-ipsi-and-contra"], [5, "save-average-powers-ipsi-and-contra"]], "Save spectra": [[3, "save-spectra"], [5, "save-spectra"]], "Cueing Behavioural Analysis Winter 2019": [[4, "cueing-behavioural-analysis-winter-2019"]], "Analyze .mat behavioural data for Accuracy and RT": [[4, "analyze-mat-behavioural-data-for-accuracy-and-rt"]], "Cueing Group Analysis Winter 2019": [[5, "cueing-group-analysis-winter-2019"]], "Load the data": [[5, "load-the-data"]], "Put the data into MNE Epochs": [[5, "put-the-data-into-mne-epochs"]], "N170 run experiment": [[10, "n170-run-experiment"]], "Run experiment": [[10, "run-experiment"], [15, "run-experiment"], [20, "run-experiment"]], "N170 Load and Visualize Data": [[11, "n170-load-and-visualize-data"]], "Epoch average": [[11, "epoch-average"], [16, "epoch-average"]], "N170 Decoding": [[12, "n170-decoding"]], "Filteriing": [[12, "filteriing"], [16, "filteriing"], [17, "filteriing"]], "Run classification": [[12, "run-classification"]], "Plot Decoding Results": [[12, "plot-decoding-results"]], "P300 run experiment": [[15, "p300-run-experiment"]], "Initiate EEG device": [[15, "initiate-eeg-device"], [20, "initiate-eeg-device"]], "P300 Load and Visualize Data": [[16, "p300-load-and-visualize-data"]], "P300 Decoding": [[17, "p300-decoding"]], "Classfication": [[17, "classfication"]], "SSVEP run experiment": [[20, "ssvep-run-experiment"]], "SSVEP Visualization": [[21, "ssvep-visualization"]], "Stimuli-Specific PSD": [[21, "stimuli-specific-psd"]], 
"SSVEP Decoding": [[22, "ssvep-decoding"]], "Decoding": [[22, "decoding"], [22, "id1"]], "Code Changelog": [[25, "code-changelog"]], "0.2.X": [[25, "x"]], "0.1.X": [[25, "id1"]], "EEG-ExPy Documentation Developer Notes": [[26, "eeg-expy-documentation-developer-notes"]], "Building the doc site": [[26, "building-the-doc-site"]], "Misc notes": [[26, "misc-notes"]], "Visual Cueing": [[28, "visual-cueing"]], "Visual Go-No-Go": [[29, "visual-go-no-go"]], "Visual N170": [[30, "visual-n170"]], "Visual P300": [[31, "visual-p300"]], "Visual SSVEP": [[32, "visual-ssvep"]], "Analyzing data": [[33, "analyzing-data"]], "Available Experiments": [[34, "available-experiments"]], "Visual P300 with Oddball paradigm": [[34, "visual-p300-with-oddball-paradigm"]], "N170": [[34, "n170"]], "SSVEP": [[34, "ssvep"]], "Old experiments": [[34, "old-experiments"]], "Go/No-Go": [[34, "go-no-go"]], "SSAEP": [[34, "ssaep"]], "C1 and P1": [[34, "c1-and-p1"]], "Auditory P300": [[34, "auditory-p300"]], "Unvalidated Experiments and other phenomena": [[34, "unvalidated-experiments-and-other-phenomena"]], "N100 - P200": [[34, "n100-p200"]], "On-task Beta": [[34, "on-task-beta"]], "Alpha reset": [[34, "alpha-reset"]], "Data Zipping": [[35, "data-zipping"]], "Command Line Interface": [[35, "command-line-interface"], [39, "command-line-interface"]], "Using the introprompt flag": [[35, "using-the-introprompt-flag"], [39, "using-the-introprompt-flag"]], "Frequently Asked Questions": [[36, "frequently-asked-questions"]], "How do I run an experiment?": [[36, "how-do-i-run-an-experiment"]], "How do I visualize a live stream to check the recording quality": [[36, "how-do-i-visualize-a-live-stream-to-check-the-recording-quality"]], "I forgot the name of my conda env?": [[36, "i-forgot-the-name-of-my-conda-env"]], "Where is my data?": [[36, "where-is-my-data"]], "How do I find the correct MAC port for my OpenBCI?": [[36, "how-do-i-find-the-correct-mac-port-for-my-openbci"]], "Installation": [[37, 
"installation"]], "Installing the Python library": [[37, "installing-the-python-library"]], "MUSE Requirements": [[37, "muse-requirements"]], "MUSE recordings on windows: BlueMuse": [[37, "muse-recordings-on-windows-bluemuse"]], "MUSE recordings on Mac: BLED112 Dongle": [[37, "muse-recordings-on-mac-bled112-dongle"]], "MUSE recordings on Linux": [[37, "muse-recordings-on-linux"]], "Issues": [[37, "issues"]], "Common Problems": [[37, "common-problems"]], "Bug reports": [[37, "bug-reports"]], "Loading and Saving Data": [[38, "loading-and-saving-data"]], "Locating the Default Data Directory": [[38, "locating-the-default-data-directory"]], "Windows 10": [[38, "windows-10"]], "Linux": [[38, "linux"]], "MacOS": [[38, "macos"]], "Changing the Default Data Directory": [[38, "changing-the-default-data-directory"]], "Naming Convention": [[38, "naming-convention"]], "Examples": [[38, "examples"]], "Running Experiments": [[39, "running-experiments"]], "Board Selection": [[39, "board-selection"]], "Experiment Selection": [[39, "experiment-selection"]], "Recording Duration": [[39, "recording-duration"]], "Subject ID": [[39, "subject-id"]], "Session Number": [[39, "session-number"]], "Using Jupyter Notebooks or a custom script": [[39, "using-jupyter-notebooks-or-a-custom-script"]], "Using virtual reality": [[39, "using-virtual-reality"]], "Heads up display": [[39, "heads-up-display"]], "Features to be added in future releases:": [[39, "features-to-be-added-in-future-releases"]], "Prerequisites:": [[39, "prerequisites"]], "": [[39, "id1"]], "Steps for running experiment in VR": [[39, "steps-for-running-experiment-in-vr"]], "Other experiments can have VR added too.": [[39, "other-experiments-can-have-vr-added-too"]], "Initiating an EEG Stream": [[40, "initiating-an-eeg-stream"]], "Supported Devices": [[40, "supported-devices"]], "Interaxon Muse": [[40, "interaxon-muse"]], "Using the Muse on Windows": [[40, "using-the-muse-on-windows"]], "OpenBCI Ganglion": [[40, 
"openbci-ganglion"]], "OpenBCI Cyton": [[40, "openbci-cyton"]], "OpenBCI Cyton + Daisy": [[40, "openbci-cyton-daisy"]], "Neurosity Notion (versions 1 and 2)": [[40, "neurosity-notion-versions-1-and-2"]], "Connecting on Windows": [[40, "connecting-on-windows"]], "BrainBit EEG Headband": [[40, "brainbit-eeg-headband"]], "G.Tec Unicorn": [[40, "g-tec-unicorn"]], "Initiating a Muse stream in Windows using Bluemuse": [[40, "initiating-a-muse-stream-in-windows-using-bluemuse"]], "Finding the USB port of the OpenBCI USB dongle": [[40, "finding-the-usb-port-of-the-openbci-usb-dongle"]], "EEG-ExPy": [[41, "eeg-expy"]], "Overview": [[41, "overview"]], "Documentation": [[41, "documentation"]], "Acknowledgments": [[41, "acknowledgments"]], "Contribute": [[41, "contribute"]], "Contact": [[41, "contact"]], "Getting Started": [[41, null]], "Experiments": [[41, null]], "Misc": [[41, null]], "About the EEG-ExPy Documentation Pages": [[42, "about-the-eeg-expy-documentation-pages"]], "Technical Information about the MUSE": [[43, "technical-information-about-the-muse"]], "MUSE setup and usage": [[43, "muse-setup-and-usage"]], "Introduction to the MUSE": [[43, "introduction-to-the-muse"]], "Headband fit and signal quality tutorial": [[43, "headband-fit-and-signal-quality-tutorial"]], "Adjusting and fitting the MUSE for better signal quality": [[43, "adjusting-and-fitting-the-muse-for-better-signal-quality"]], "Using water for better signal quality": [[43, "using-water-for-better-signal-quality"]], "Details related to Muse ERP analysis": [[43, "details-related-to-muse-erp-analysis"]], "NTCS Phase 1 Instructions": [[44, "ntcs-phase-1-instructions"]], "On this page": [[44, "on-this-page"]], "Instructional Videos": [[44, "instructional-videos"]], "Summary of the steps": [[44, "summary-of-the-steps"]], "TL;DR instructions version": [[44, "tl-dr-instructions-version"]], "Full step-by-step instructions": [[44, "full-step-by-step-instructions"]], "1. 
Sign up and provide informed consent.": [[44, "sign-up-and-provide-informed-consent"]], "2. Check your e-mail.": [[44, "check-your-e-mail"]], "3. Install miniconda": [[44, "install-miniconda"]], "4. Set up environment and install eeg-expy": [[44, "set-up-environment-and-install-eeg-expy"]], "5. Set up your EEG Device": [[44, "set-up-your-eeg-device"]], "5.1 Windows+Muse device users": [[44, "windows-muse-device-users"]], "5.2 OpenBCI Devices": [[44, "openbci-devices"]], "6. Run the visual N170 experiment": [[44, "run-the-visual-n170-experiment"]], "6.1 Check your signal quality": [[44, "check-your-signal-quality"]], "6.2 Run the experiment": [[44, "run-the-experiment"]], "6.3.1 Get ready": [[44, "get-ready"]], "Experiment details": [[44, "experiment-details"]], "What to do": [[44, "what-to-do"]], "7. After recording, locate and zip your recorded data": [[44, "after-recording-locate-and-zip-your-recorded-data"]], "7.1 Take a look at what\u2019s there": [[44, "take-a-look-at-what-s-there"]], "7.2 Compress your data": [[44, "compress-your-data"]], "8. Upload your zipped data file to your personalized URL": [[44, "upload-your-zipped-data-file-to-your-personalized-url"]], "9. 
Make use of your newfangled knowledge!": [[44, "make-use-of-your-newfangled-knowledge"]], "FAQs, Troubleshooting, Gotchas": [[44, "faqs-troubleshooting-gotchas"]], "Where are my files?": [[44, "where-are-my-files"]], "OpenBCI Ports and drivers": [[44, "openbci-ports-and-drivers"]], "Python is not 3.7": [[44, "python-is-not-3-7"]], "Using an extra electrode with Muse": [[45, "using-an-extra-electrode-with-muse"]], "The electrode": [[45, "the-electrode"]], "Attaching the extra electrode": [[45, "attaching-the-extra-electrode"]], "Getting data from the electrode": [[45, "getting-data-from-the-electrode"]]}, "indexentries": {}}) \ No newline at end of file diff --git a/develop/sg_execution_times.html b/develop/sg_execution_times.html new file mode 100644 index 00000000..89b3a0c7 --- /dev/null +++ b/develop/sg_execution_times.html @@ -0,0 +1,620 @@ + + + + + + + Computation times — EEG Notebooks 0.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +
+

Computation times

+

02:18.622 total execution time for 26 files from all galleries:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

N170 Decoding (../examples/visual_n170/02r__n170_decoding.py)

00:30.511

0.0

Cueing Group Analysis (../examples/visual_cueing/02r__cueing_group_analysis.py)

00:29.319

0.0

Cueing Group Analysis Winter 2019 (../examples/visual_cueing/04r__cueing_group_analysis_winter2019.py)

00:24.491

0.0

SSVEP Visualization (../examples/visual_ssvep/01r__ssvep_viz.py)

00:12.710

0.0

P300 Decoding (../examples/visual_p300/02r__p300_decoding.py)

00:12.321

0.0

P300 Load and Visualize Data (../examples/visual_p300/01r__p300_viz.py)

00:11.982

0.0

Cueing Single Subject Analysis (../examples/visual_cueing/01r__cueing_singlesub_analysis.py)

00:10.934

0.0

N170 Load and Visualize Data (../examples/visual_n170/01r__n170_viz.py)

00:04.369

0.0

SSVEP Decoding (../examples/visual_ssvep/02r__ssvep_decoding.py)

00:01.827

0.0

Cueing Behavioural Analysis Winter 2019 (../examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.py)

00:00.155

0.0

Cueing Single Subject Analysis (../examples/visual_cueing/01r__cueing_singlesub_analysis.py)

00:00.000

0.0

Cueing Group Analysis (../examples/visual_cueing/02r__cueing_group_analysis.py)

00:00.000

0.0

Cueing Behavioural Analysis Winter 2019 (../examples/visual_cueing/03r__cueing_behaviour_analysis_winter2019.py)

00:00.000

0.0

Cueing Group Analysis Winter 2019 (../examples/visual_cueing/04r__cueing_group_analysis_winter2019.py)

00:00.000

0.0

N170 run experiment (../examples/visual_n170/00x__n170_run_experiment.py)

00:00.000

0.0

N170 run experiment (../examples/visual_n170/00x__n170_run_experiment.py)

00:00.000

0.0

N170 Load and Visualize Data (../examples/visual_n170/01r__n170_viz.py)

00:00.000

0.0

N170 Decoding (../examples/visual_n170/02r__n170_decoding.py)

00:00.000

0.0

P300 run experiment (../examples/visual_p300/00x__p300_run_experiment.py)

00:00.000

0.0

P300 run experiment (../examples/visual_p300/00x__p300_run_experiment.py)

00:00.000

0.0

P300 Load and Visualize Data (../examples/visual_p300/01r__p300_viz.py)

00:00.000

0.0

P300 Decoding (../examples/visual_p300/02r__p300_decoding.py)

00:00.000

0.0

SSVEP run experiment (../examples/visual_ssvep/00x__ssvep_run_experiment.py)

00:00.000

0.0

SSVEP run experiment (../examples/visual_ssvep/00x__ssvep_run_experiment.py)

00:00.000

0.0

SSVEP Visualization (../examples/visual_ssvep/01r__ssvep_viz.py)

00:00.000

0.0

SSVEP Decoding (../examples/visual_ssvep/02r__ssvep_decoding.py)

00:00.000

0.0

+
+
+ + +
+
+
+ +
+ +
+

© Copyright John Griffiths, Jadin Tredup, NeuroTechX, & Contributors.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + \ No newline at end of file