diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 0000000..7770ae7 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: b9b45093e9614ff4bacb10ea7767257f +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.doctrees/accessibility.doctree b/.doctrees/accessibility.doctree new file mode 100644 index 0000000..d0a3d59 Binary files /dev/null and b/.doctrees/accessibility.doctree differ diff --git a/.doctrees/api_install.doctree b/.doctrees/api_install.doctree new file mode 100644 index 0000000..c9997b8 Binary files /dev/null and b/.doctrees/api_install.doctree differ diff --git a/.doctrees/behavioral_data.doctree b/.doctrees/behavioral_data.doctree new file mode 100644 index 0000000..28bbc61 Binary files /dev/null and b/.doctrees/behavioral_data.doctree differ diff --git a/.doctrees/contact.doctree b/.doctrees/contact.doctree new file mode 100644 index 0000000..fdd40c7 Binary files /dev/null and b/.doctrees/contact.doctree differ diff --git a/.doctrees/data_hosting.doctree b/.doctrees/data_hosting.doctree new file mode 100644 index 0000000..a56e0da Binary files /dev/null and b/.doctrees/data_hosting.doctree differ diff --git a/.doctrees/dwi_acquisitions.doctree b/.doctrees/dwi_acquisitions.doctree new file mode 100644 index 0000000..fe707a6 Binary files /dev/null and b/.doctrees/dwi_acquisitions.doctree differ diff --git a/.doctrees/dwi_processing.doctree b/.doctrees/dwi_processing.doctree new file mode 100644 index 0000000..3c657da Binary files /dev/null and b/.doctrees/dwi_processing.doctree differ diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle new file mode 100644 index 0000000..c3b56fa Binary files /dev/null and b/.doctrees/environment.pickle differ diff --git a/.doctrees/experimentaldesign_diagrams.doctree b/.doctrees/experimentaldesign_diagrams.doctree new file mode 100644 index 0000000..2adaba0 Binary files /dev/null and b/.doctrees/experimentaldesign_diagrams.doctree differ diff --git a/.doctrees/get_data.doctree b/.doctrees/get_data.doctree new file mode 100644 index 0000000..9bb2ae9 Binary files /dev/null and b/.doctrees/get_data.doctree differ diff --git a/.doctrees/ibc_api.doctree b/.doctrees/ibc_api.doctree new file mode 100644 index 0000000..809788e Binary files /dev/null and b/.doctrees/ibc_api.doctree differ diff --git a/.doctrees/index.doctree b/.doctrees/index.doctree new file mode 100644 index 0000000..feb9870 Binary files /dev/null and b/.doctrees/index.doctree differ diff --git a/.doctrees/movie_protocols_data.doctree b/.doctrees/movie_protocols_data.doctree new file mode 100644 index 0000000..f215ef2 Binary files /dev/null and b/.doctrees/movie_protocols_data.doctree differ diff --git a/.doctrees/mri_acquisitions.doctree b/.doctrees/mri_acquisitions.doctree new file mode 100644 index 0000000..49982fb Binary files /dev/null and b/.doctrees/mri_acquisitions.doctree differ diff --git a/.doctrees/mridata_organization.doctree b/.doctrees/mridata_organization.doctree new file mode 100644 index 0000000..5eb5fd5 Binary files /dev/null and b/.doctrees/mridata_organization.doctree differ diff --git a/.doctrees/nbsphinx/get_data.ipynb b/.doctrees/nbsphinx/get_data.ipynb new file mode 100644 index 0000000..e6e70a2 --- /dev/null +++ b/.doctrees/nbsphinx/get_data.ipynb @@ -0,0 +1,1069 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Get the data\n", 
+ "\n", + "This is a simple guide on how to download the data using [this API](https://github.com/individual-brain-charting/api). You can also find the reference for the API [here](https://individual-brain-charting.github.io/docs/ibc_api.html).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Import the fetcher as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[siibra:INFO] Version: 0.4a47\n", + "[siibra:WARNING] This is a development release. Use at your own risk.\n", + "[siibra:INFO] Please file bugs and issues at https://github.com/FZJ-INM1-BDA/siibra-python.\n", + "[siibra:INFO] Clearing siibra cache at /home/himanshu/.cache/siibra.retrieval\n" + ] + } + ], + "source": [ + "import ibc_api.utils as ibc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To see what is available for a given data type on IBC, we need fetch the file that contains that information.\n", + "The following loads a CSV file with all that info as a pandas dataframe and\n", + "saves it as ``ibc_data/available_{data_type}.csv``.\n", + "\n", + "Let's do that for IBC volumetric contrast maps.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "db = ibc.get_info(data_type=\"volume_maps\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's see what's in the database\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
subjectsessiondeschemitaskdirectionrunspacesuffixdatatypeextensioncontrastmegabytesdatasetpath
00100preprocNaNArchiSocialapMNI152NLin2009cAsymNaNNaN.jsonfalse_belief-mechanistic0.000552volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
10100preprocNaNArchiSocialapMNI152NLin2009cAsymNaNNaN.nii.gzfalse_belief-mechanistic2.896178volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
20100preprocNaNArchiSocialapMNI152NLin2009cAsymaudioNaN.jsonfalse_belief-mechanistic_audio0.000543volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
30100preprocNaNArchiSocialapMNI152NLin2009cAsymaudioNaN.nii.gzfalse_belief-mechanistic_audio2.893414volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
40100preprocNaNArchiSocialapMNI152NLin2009cAsymvideoNaN.jsonfalse_belief-mechanistic_video0.000543volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
................................................
532191540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_correct-dot_correct0.000570volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532201540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_impossible_correct0.000618volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532211540preprocNaNSceneffxMNI152NLin2009cAsymincorrectNaN.jsonscene_impossible_incorrect0.000614volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532221540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_possible_correct-scene_impossible_correct0.000598volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532231540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_possible_correct0.000597volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
\n", + "

53224 rows × 15 columns

\n", + "
" + ], + "text/plain": [ + " subject session desc hemi task direction run \\\n", + "0 01 00 preproc NaN ArchiSocial ap \n", + "1 01 00 preproc NaN ArchiSocial ap \n", + "2 01 00 preproc NaN ArchiSocial ap \n", + "3 01 00 preproc NaN ArchiSocial ap \n", + "4 01 00 preproc NaN ArchiSocial ap \n", + "... ... ... ... ... ... ... .. \n", + "53219 15 40 preproc NaN Scene ffx \n", + "53220 15 40 preproc NaN Scene ffx \n", + "53221 15 40 preproc NaN Scene ffx \n", + "53222 15 40 preproc NaN Scene ffx \n", + "53223 15 40 preproc NaN Scene ffx \n", + "\n", + " space suffix datatype extension \\\n", + "0 MNI152NLin2009cAsym NaN NaN .json \n", + "1 MNI152NLin2009cAsym NaN NaN .nii.gz \n", + "2 MNI152NLin2009cAsym audio NaN .json \n", + "3 MNI152NLin2009cAsym audio NaN .nii.gz \n", + "4 MNI152NLin2009cAsym video NaN .json \n", + "... ... ... ... ... \n", + "53219 MNI152NLin2009cAsym correct NaN .json \n", + "53220 MNI152NLin2009cAsym correct NaN .json \n", + "53221 MNI152NLin2009cAsym incorrect NaN .json \n", + "53222 MNI152NLin2009cAsym correct NaN .json \n", + "53223 MNI152NLin2009cAsym correct NaN .json \n", + "\n", + " contrast megabytes \\\n", + "0 false_belief-mechanistic 0.000552 \n", + "1 false_belief-mechanistic 2.896178 \n", + "2 false_belief-mechanistic_audio 0.000543 \n", + "3 false_belief-mechanistic_audio 2.893414 \n", + "4 false_belief-mechanistic_video 0.000543 \n", + "... ... ... \n", + "53219 scene_correct-dot_correct 0.000570 \n", + "53220 scene_impossible_correct 0.000618 \n", + "53221 scene_impossible_incorrect 0.000614 \n", + "53222 scene_possible_correct-scene_impossible_correct 0.000598 \n", + "53223 scene_possible_correct 0.000597 \n", + "\n", + " dataset path \n", + "0 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "1 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "2 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "3 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "4 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "... ... ... \n", + "53219 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53220 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53221 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53222 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53223 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... 
\n", + "\n", + "[53224 rows x 15 columns]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are over 26000 statistic maps (half of the rows because there are .json files corresponding to each map) available for download.\n", + "But since it's a pandas dataframe, we can filter it to get just what we want.\n", + "Let's see how many statistic maps are available for each task.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "task\n", + "Audio 5852\n", + "MathLanguage 5760\n", + "ArchiStandard 3588\n", + "RSVPLanguage 3458\n", + "MTTNS 1824\n", + "MTTWE 1824\n", + "Audi 1800\n", + "SpatialNavigation 1728\n", + "ArchiSocial 1404\n", + "Self 1320\n", + "Visu 1152\n", + "BiologicalMotion2 1100\n", + "VSTMC 1100\n", + "BiologicalMotion1 1100\n", + "HcpWm 1092\n", + "ArchiSpatial 1092\n", + "ArchiEmotional 1092\n", + "FaceBody 945\n", + "RewProc 918\n", + "HcpMotor 858\n", + "MVEB 792\n", + "DotPatterns 726\n", + "NARPS 720\n", + "Scene 693\n", + "Attention 660\n", + "EmoReco 660\n", + "WardAndAllport 660\n", + "TwoByTwo 660\n", + "MCSE 648\n", + "Moto 648\n", + "SelectiveStopSignal 528\n", + "StopNogo 462\n", + "Lec1 432\n", + "MVIS 432\n", + "EmoMem 396\n", + "VSTM 360\n", + "FingerTapping 330\n", + "HcpEmotion 312\n", + "HcpGambling 312\n", + "HcpLanguage 312\n", + "HcpRelational 234\n", + "HcpSocial 234\n", + "PreferenceFaces 222\n", + "EmotionalPain 216\n", + "Enumeration 216\n", + "PreferenceHouses 216\n", + "PainMovie 216\n", + "Lec2 216\n", + "TheoryOfMind 216\n", + "PreferenceFood 216\n", + "PreferencePaintings 210\n", + "Stroop 198\n", + "Catell 198\n", + "StopSignal 198\n", + "ColumbiaCards 192\n", + "Bang 144\n", + "Discount 132\n", + "Name: count, dtype: int64" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "db[\"task\"].value_counts()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can find the descriptions of all these tasks [here](https://individual-brain-charting.github.io/docs/tasks.html).\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For this example, let's just download the maps from Discount task, only for sub-08. You can filter the maps for tasks and subjects like this.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 12 files for subjects ['08'] and tasks ['Discount'].\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
subjectsessiondeschemitaskdirectionrunspacesuffixdatatypeextensioncontrastmegabytesdatasetpath
256240827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.jsonamount0.000503volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256250827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.nii.gzamount2.921305volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256260827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.jsondelay0.000505volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256270827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.923846volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256280827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.jsonamount0.000504volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256290827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.nii.gzamount2.925251volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256300827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.jsondelay0.000506volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256310827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.925747volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256320827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.jsonamount0.000503volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256330827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.nii.gzamount2.921803volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256340827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.jsondelay0.000505volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256350827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.920833volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
\n", + "
" + ], + "text/plain": [ + " subject session desc hemi task direction run \\\n", + "25624 08 27 preproc NaN Discount ap \n", + "25625 08 27 preproc NaN Discount ap \n", + "25626 08 27 preproc NaN Discount ap \n", + "25627 08 27 preproc NaN Discount ap \n", + "25628 08 27 preproc NaN Discount ffx \n", + "25629 08 27 preproc NaN Discount ffx \n", + "25630 08 27 preproc NaN Discount ffx \n", + "25631 08 27 preproc NaN Discount ffx \n", + "25632 08 27 preproc NaN Discount pa \n", + "25633 08 27 preproc NaN Discount pa \n", + "25634 08 27 preproc NaN Discount pa \n", + "25635 08 27 preproc NaN Discount pa \n", + "\n", + " space suffix datatype extension contrast megabytes \\\n", + "25624 MNI152NLin2009cAsym NaN NaN .json amount 0.000503 \n", + "25625 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.921305 \n", + "25626 MNI152NLin2009cAsym NaN NaN .json delay 0.000505 \n", + "25627 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.923846 \n", + "25628 MNI152NLin2009cAsym NaN NaN .json amount 0.000504 \n", + "25629 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.925251 \n", + "25630 MNI152NLin2009cAsym NaN NaN .json delay 0.000506 \n", + "25631 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.925747 \n", + "25632 MNI152NLin2009cAsym NaN NaN .json amount 0.000503 \n", + "25633 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.921803 \n", + "25634 MNI152NLin2009cAsym NaN NaN .json delay 0.000505 \n", + "25635 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.920833 \n", + "\n", + " dataset path \n", + "25624 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25625 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25626 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25627 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25628 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25629 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25630 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25631 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25632 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25633 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25634 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25635 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... " + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "filtered_db = ibc.filter_data(db, task_list=[\"Discount\"], subject_list=[\"08\"])\n", + "filtered_db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we are ready to download the few selected maps that we filtered.\n", + "\n", + "The following will save the requested maps under\n", + "``ibc_data/resulting_smooth_maps/sub-08/task-Discount`` \n", + "(or whatever subject you chose). And will also create a local CSV file ``ibc_data/downloaded_volume_maps.csv`` to track the downloaded files. 
This will contain local file paths and the time they were downloaded at, and is updated everytime you download new files.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 12 files to download.\n", + "***\n", + "To continue, please go to https://iam.ebrains.eu/auth/realms/hbp/device?user_code=UFKZ-XXQU\n", + "***\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[siibra:INFO] 139625 objects found for dataset ad04f919-7dcc-48d9-864a-d7b62af3d49d returned.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ebrains token successfuly set.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "Overall Progress: 0%|\u001b[32m \u001b[0m| 0/12 [00:00\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
[styled HTML rendering omitted: 12 rows × 2 columns (local_path, downloaded_on); the same content follows as plain text]
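The tracking file can also be read back at any point to see what has already been fetched and when. A minimal sketch; the exact layout of the CSV is an assumption here, but the columns shown above are ``local_path`` and ``downloaded_on``:

```python
import pandas as pd

# Inspect the download log kept by ibc.download_data
log = pd.read_csv("ibc_data/downloaded_volume_maps.csv")
print(log.columns.tolist())
print(len(log), "files downloaded so far, most recent at", log["downloaded_on"].max())
```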
\n", + "" + ], + "text/plain": [ + " local_path \\\n", + "0 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "1 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "2 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "3 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "4 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "5 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "6 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "7 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "8 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "9 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "10 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "11 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "\n", + " downloaded_on \n", + "0 2023-10-05 17:23:53.472528 \n", + "1 2023-10-05 17:23:53.628380 \n", + "2 2023-10-05 17:23:53.634523 \n", + "3 2023-10-05 17:23:53.793226 \n", + "4 2023-10-05 17:23:53.799418 \n", + "5 2023-10-05 17:23:53.972341 \n", + "6 2023-10-05 17:23:53.979429 \n", + "7 2023-10-05 17:23:54.140314 \n", + "8 2023-10-05 17:23:54.146809 \n", + "9 2023-10-05 17:23:54.304385 \n", + "10 2023-10-05 17:23:54.310566 \n", + "11 2023-10-05 17:23:54.468429 " + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "downloaded_db = ibc.download_data(filtered_db)\n", + "downloaded_db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's try plotting one of these contrast maps" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhsAAADJCAYAAACKTvCwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAAsTAAALEwEAmpwYAAC6eElEQVR4nOydeXxU1fnGv5NkkpAhZgiEQAIkIAEMRBZBNhGwuKB16w/3fa/71s22WrdWW1tt1daNurburQtWrKUqBUEEgQhGIRETSUbCYJg4zEAySeb3xznvOfeGsKigBu7z+UBmuXPvufeee8573vd5n9eXTCaTePDgwYMHDx487CakfNsN8ODBgwcPHjzs2fCMDQ8ePHjw4MHDboVnbHjw4MGDBw8edis8Y8ODBw8ePHjwsFvhGRsePHjw4MGDh90Kz9jw4MGDBw8ePOxWeMaGBw8ePHjw4GG3wjM2PHjw4MGDBw+7FZ6x4cGDBw8ePHjYrfCMDQ8ePHjw4MHDboVnbHjw4MGDBw8edis8Y8ODBw8ePHjwsFvhGRsePHjw4MFDJ0ckEmHGjBkMGTKE/fbbj4ULF7q+v+OOOxgxYgQjRoxg2LBhpKam0tDQ8I21z+dVffXgwYMHDx46N8466ywmTZrE+eefT3NzM/F4nGAw2OG2s2bN4q677uKNN974xtrnGRsePHjw4MFDJ0ZjYyMjRoxgzZo1+Hy+HW5/6qmnMnXqVC644IJvoHUKnrHhwYMHDx48dGIsX76cCy+8kNLSUsrLyznggAP405/+RCAQ2GrbeDxOnz59qKqqIjc3d4f7PuKII9iwYcM2v+/RowevvfbaDveTtsMtPHjw4MGDBw/fWbS0tLB06VLuuecexo4dy5VXXsntt9/OLbfcstW2s2bNYuLEiTtlaABs2FDPkiVvbvP70aOn7dR+PIKoBw8ePHjw0InRp08f+vTpw9ixYwGYMWMGS5cu7XDbp59+mlNOOeVL7L0N2LKdfzsHz9jw4MGDBw8eOjF69epF3759WbVqFQD//e9/KS0t3Wq7xsZG5s6dy7HHHvsl9t4KNG7n387BC6N48ODBgwcPnRz33HMPp512Gs3NzQwYMIBHHnmE+++/H4Af/vCHALzwwgscdthhHXI5to0kX8aDsS14BFEPHjx48PC1cdVVVwHwxz/+8Vtth4ddi9GjB7NkyYPb+f5alixZssP9eJ4NDx48ePDwtbF8+fJvuwkedgt2jWfD42x48ODBw25EcXExc+bM+bab4eFLori4mC5dutC1a1d69erF2WefzaZNm77tZn0LaAUi2/m3c/CMDQ8ePHjw4KEDzJo1i02bNrF8+XKWLVvGbbfd9m036VvArslG8cIoHjx48ODBw3bQq1cvDj/88L00VCTZKF8PnmfDgwcPHjx42A5qa2uZPXs2AwcO/Lab8i3A82x48ODBgwcPuw3HHXccPp+PTZs2ccghh3DTTTd92036FiCcja8Hz7PhwYMHDx48dIAXX3yRaDTKW2+9xUcffbTdGiF7LjwFUQ8ePHjw4GG3Y/LkyZx99tn86Ec/+rab8i1g12SjeGEUDx48eNjNSCQSbNliV4FpaWmkpXnDb2fCVVddRXFxMeXl5QwfPvzbbs43iF1DEPV6uwcPHjzsZhx55JGu97/4xS+49dZbv6XW7BiPPPIIAIFAgFtOOgkAP5DQ3//iqac4+eSTt7uPp59+2shiH3PMUmbNOgBQJc5jsRgA55xzzq5v/G5CXl4eZ555JjfffDP/+Mc/vu3mfIOQMMrXg2dsePDgwcNuRHV19bfdBA9fAR3dt/vuu++bb8i3jl1DEPWMDQ8ePHjYC/Hiiy9SUFAAgM/nI5FQfoubJk4kBISBOBAlX/+iGFDbn3LKI2b7M844w7XfJ554AoAfn3km9ZwIDAeCHH30+3qLcrJ5lmnAO+eeSxioBB5+912kVFcoFOK4447b1afs4SthR3Llvp3ai2dsePDgwcNehP/+978A9OvXz/BG/H4/L5SWEgbSUUZGPblAETBB/3IaUAjUAUvIyMgA4N5776W5uZna2lp8Ph8ZGRn0P+kkjgUeZDYqAFOCDcJEyDGvlFGTAM448ECECXEs8F8t8f69731vl18DD18GLWzfs9Ftp/biGRsePHjwsJfiEU10LAbyUJN+DFhDPsrImAacrzaenq4sgyUFQBUnnfQ0zzzTMW8jov/mE6WeMBB0fJugEeXNSOhtE+228AOTpk1jnldT5juAJND0tffiGRu7AF5pZQ/fBLx+5uHr4tlnn2XAgAEAXHnAAZToz2Ooyd6SQItRxsYMOC9dbTQBeAmUxVEBVNK1a1c2bdpEPB4nmUySlpbG+yedRAhYDNQzGGXGBPRRAIJEGclK/VohTAM1FBAFIARUA2XTpvHsM89w4okn7toL4eFLYEecjV47tRfP2NgF2Dv18j180/D6mYevinvvvReAkpISkskkBx54MgOwU32B/puOGBvNKKOiBpb0VF9GgJebgZnk8wjFwMlHHcVDTz9txK5SUlL4NQP03nJQYZd0va8Idkd5KIOmCJVWWQHECWljYzHK8AkC3XNyTPsvu+yyr3spPHxpdIJslOLiYmbOnMm0adN252E87GIUFxdTX19PamoqXbt25YgjjuDee++la9eu33bTPHjwsEuQxxqaaabWfBJDTfsR0K8WANVQrv0f5VnAy5zIIqYBNahQyNa4Qv2OCv13MbnUG8MmAjQwFshCGSIRFA8kYsyRchRvJAtIP+IIslAG0XOXX84JmkTq4RtCG7C59WvvxvNseOgQs2bNYtq0aaxbt47DDz+c2267jV//+tffdrM8ePCwE3jttdeITp8OKGrmSOBlYODs2Rx44E9RHoUiaokDUEsEFbyIoKZ4P1DOF1+8Smurmmh+360bk1EsDoC7gQYGkJ2dTWpqKoAmnE5A+UfmMJWVjNFHO3XjRgDO7daNFwwttA5llFSRS5QGcgFoIEGIKH6sh2M4yhdS51PZD0Eg4Bkeux9JLLf3a8AzNjxsF3t3aWUPHjoXXnzxRQAeP/54k0PinCd+PH06yvQo0f/y9DdZestKvvjiGnw+m87Y0tICwA1A+kAgCM1LlGkCA0lPTyc1NRWfz6d/lwVE6MNKZgBH6k9kP49Ho2bfyWQSn89HdvZoGvA72uOnwWwVp5YQsIYxjnPJAqp9Ppa/8IKXJrs70QbaJv1a8GqjeNgu9u7Syh48ePCwlyOJovBs699OwvNseOgQXmllDx46Fx555BF69+7N9OkPko8lfzbr1zlAKbCSKv1KPgEYiOJPVLq8GoAR7wIgBLEq+A+K0QHFbN68mba2NgD9txwoN+m0WaiFcYbeT2ZmptmdPdYpqIBJQL+XvBgJtVSxkjAVRKlwtM0PFB5/PI88/HCnkj7vVNhFng3P2PDQIV588UWmTZvG3LlzOfXUU9mwYQPBYPDbbpYHDx7a4dFHHwXg3HNTUDPDaOqpZjHLADUhp2PzPwYTZZXRCPWrnfi7KdtjQynO5Wpra6sxNl4F4nFlSswFFjESKOWYY/
py110fA2I8PAUsJqG3q9FHulTvp7W11XA8AK0aeiw2NbY9KlCE1QIWs8oYUTFgBMpMKjj3XB7VhsvZZ5+9M5fNw87C42x4+CbgLK0s8WAPHjx8FzETRc70A4Xag6FULfyobI4YMm9EcOWTJEohPR1y0tmWb/xu/Ys1DNbHGY2ibbafiRaQTQOLyGeRGDME+F3RVerIkUc62Lv4QKS6aDPKRBIeh/q3jBDpOj02C2U8Ffuhmx+OPOccXn2ko317+FpoY9t24JfAbjc2vNLKnR97b2llDx6+23j66acpKirir4ccwvHAHOYTZSxqslYTfUi/iuBUu4ij0lLL9Z5KIDKCaP0m1/5bWloMsfNNDkLllRSr7RmOMmFqyMzMJCUlRXss/EQZCSYPRcIhfrNPp2fD5/MRjeaQnR3Czmph/dePMkBiZh8R/U2NPoOShAq+BIAx55zD05mZO6xI6+FLoLN4NjpbaWUPW2PvLa3swcN3Ew8//DAAgwYNokuXLhyJMiqygCoWOTwY6q9M3Tb0nnD8A4i4JpSkI6V0332v1K9K9RFiKB5FEcrjECMjI4OUFMk3KEF5PiajjIVKlGlgDyD7d/NDIo7XImQe138rgWoG0OAKo9SggixFjrbXn3IKD8fVmZ577rl4+JroDJ4Nr7Ry54RXWtmDh86D7qNHU4AyKAqxwuASkLjnk084un9/QJkKWUCDCUJIllkJ0Qa3V8NCwioRlC8hhE1Rlc8Vli+vBo5HhVgG6m1FfFyhR48T+OKLl7Y6SjQ6nuzsKv3Or88iZvaRTz0Fuv3OLcJYZRBRCsnexpl4+AroLJ4NDx48ePCwayGl4bt06UIBaoL1o8wC8VeIifADbWjI58oIKUaFQbQaR04hoIyNZDJpskt69BiKFTMXH0kI5XEIISXUtmz5TP8mgA2vSLm1Sv07MVCKzf5TUlLaeTec1Vpsa53tf6imBoC7iooM00P8IHK0sQUFeNhF8LJRPHjw4GHvw6OPPkpRURGgJmuhUAawJdJEnxPgo40bDW+ud+9BKGNAyJ3FAERrN7lCJ83NylTJpp6o3kYZEAXYGrFj9D78wIcA7LffPnz4oaTTVjtaEzDHggKz/8zMTFdIJRpVr7Nze0KiBAkOffbZf8z2F3VTJc3/sHYtS/r2NR6OEMrQCAGXavXU6x95xMtO+bpo40vpaWwLnrHhwYMHDx48eOgYnmfDgwcPHvY+BAIBo32xYdQoMw/EUH6EZQxDeR7UN1JQEWDjxk/p1u02lEeiBHKUiFYyGTXhjGQyyaZNKqQSpQ8wXh9hMrYQfRAohSIfhMDv9+Pz+XSmoaSwisJGAOURsfwQ2X9mZqbruPI62rCJlpYg3bo1s3HjrSaDMZlMGrLr4337UqKPIAGbGlQ4JUdvc/c553ieja8Lj7PhwYMHD3sPROemd+/e3D9pEgAzUHkhMtkuIx8V3shB+A6tra1mslZ/m4E40WgOyWSU9nBO+orTcax66Z+ibAabxarjNcu54IIe+sNW7JQfRmllFKAyWSS8UoLPF+/gWO4slZSUFBobT3JkuahzeZN81RzqTWXYMFCFCtw4K6wEHNfNq5/yFbGLslG82igePHjw0Ilwy6RJRuZKaJqyqlfTqwTYVc3Ubt1Ocv2+sfEmGhu/v5NHK0V5QYYbZ4jhi9YkIf4f4DFUTdnPgSiWOSFaH5IrImoYefQfWvxlTtmgW7ffo0q7Hcnr9GEB6rwl30W4KnKkLFRROg9fA+LZ2Na/nYTn2fDgwYOHToBAIGBeOzU0xNFQBBzEGspZQ5Rh2Dojda79OD0FgtTUVJMhArDPPvsA8MEHJzB0qA5cxLpZN0JNDCVG/jyqSoof+ALoqvcgBoY7a0WhABI9zTGcbZBy9ttqp90HQClhainFTZBNYBfi0oqbjz+e47xy9F8NHmfDgwcPHvYOPPzww3Tv3p2Zxx3nSAZVTgYpJT8ctcpfAMxlpQiRAzA9O5t/NTZuYwLHZWj4fD6j8Jmfnw+8ob6oGaQiM6GNKENjCcqLkdAtqQW6oNwffrRVgmKSSHIq6rtECamp/V0hFGcbOmpfTs7PcZsSYYKoHBc5WgRbCwaUARJEGSMPP/zwHi3ytWXLFg4++GCamppoaWlhxowZ2yyg+Y9//IMZM2awePFiRo8evf0dS9XXrwnP2NgG7rvvPvr06QOoB04ehFAoxA9+8IPt/vaf//ynyYNPSUmhvr4eUOXaL7744t3Yag8ePOzJiGBrgoAyNAL/B5TCoAQMqoDJL6tJVmSzKsFVKfXLYt26gwHo1Ws1xPOw6axZKBOnFGVg1KA8G6L8IWofogIaAmDDhkO+YskKIZwqYyOblRTpo4nDRbw8lh2iPD4BYPF558EebGxkZGTwxhtv0LVrVxKJBAcddBDTp09n3Lhxru2i0Sh/+tOfGDt27M7t2PNs7Hrceeed9OjRA4ADDzyQ7GylQzd48PVIsHL16kN56623AJgyZYrr9/J5WVkZgwZN0p8WsGrV0+pVQQGPP/44ABs2bOCaa67ZjWfjwYOHzo4HH3wQgH333Zc/TptmpvVp+vvAaP3mSNQSfjmk+2H0P1SVVoB6BiDT77a8B21tbVuVlgdFKBXS5rp1OSQSMfr2rUL5CkborWKo6fwpQPbRiDIuGlm7di5+v994S5yGRjKZNPt3kkWTHYY8RFE0AroFQawnQ4yOQpSKCMDQLJS1kQ5F5fZ6XnjhhR1eh84Mn89nso4SiQSJRKLDe3r99dfz05/+lDvuuGPnduxlo+x+DB78sn51JIrdHWfQoFeBCG++ObnD30ydOgeYo98FgHQGD74cgCVLfrNb2+vBg4c9C0OGDAHgtMmTGYFKPp0AFPfQG5RgaQwODmYC8SOASjnd2lWelpZmjA8nV8KJlJQUE3qRdNu1a7+nDI6sMWrGDwGJ9UAm0OT4dTMQo2/fUwiFnsXv95t9bsvokWOlpKSYAnCCxsYXSSaTbN68GYDjevc24ZM4NsG2FBgkua/i2gC6hSHjoovoP3duh8feE9Da2soBBxxAVVUVl1566Vbei6VLl7J27VqOOuqonTc2OkNtlO8y7rzzTvNaLO68vDz23XdfAEaPvsuxdQLVnaXscR5Tp87h8cfX0tLSwrp16/D5fEyd+jiwnFyWkQXUkouttwijRx/F22//E1BW6J/+9CfA/aB73o69DzNnzgTU6tLZFzIyMgBIT08nLS2N9evXk0wmzfbgThWUwfmHP/zhN9V0Dx48fIeQmprK8uXLiUQiHH/88axcuZJhw4YBany55pprePTRR7/cTj3PxpfDnXfeaVxKaWlpdOnSBVADubCie/ZUDOkZZWUMcPxWahuGUZSoKCcCefTs2ZMjjvgIaKGsLAtYzAAdR4wDjTQQNQI3MICoOVa/fv3IzMzkxBNLgBh/+cv7ANxzzz1m0kgmk57xsYfi/vvvB1RtC8ky8Pl8ZvU3Y0Y9sA6rIFDHyJGZtLS0kJmZCajVXzAYNPuRlec//vEPPv/8c2DPdBfvLfjb3/5GSYmqFVJLLhNooAQodpI2slBrG
SFlVAOVytkg4leRyLOAO2QBqtR7R4TR9uEMWYxlZGQYMS7IUwOjeFbK8wA/Q4c28sEHQWwdFaW1kZGRYfbTfv8dufpbWlq2Iq0KZOz+dyRCa2srd3bvDlgx9XSgWbNo04W/qgvHDATSMjL429/+xumnn77VcfcUBINBpk6dymuvvWaMjWg0ysqVK034f926dRxzzDG8/PLL2yeJtuJ5NnY1Vk+ZQhgllBN2fF6Aep5ODIXYsmULAwY8DkzmiCP8wGYgkxUrIkCO8WT+PaScmAUFhyHc6GaizCgr4/kVK9x7z+nJJddNVG/jcOdvnV4VD3sTZsyQdMB9UbNIDWpmibFs2UygpeMfetjj0Lt3bwCuGzeOAVh5CxcSwAb9Og5UQOTVjRzn83Fsh2XcrTestbXVTOhOw6O1tdUYvU6kpqaaxdLGjfvQbQxqEi8AIj5YBx980B1lgeQhOh8bN56Fz+dzcTNM8xMJY4S0tbV1yN/w+XwdZq2I8NePNm7kp926GbtLirEBDF9dTyQ/34h8paOiAnJt9ySEw2H8fj/BYJDNmzfzn//8h5/+9Kfm+5ycHDZs2GDeT5kyhd///vc7l43ieTa2jzvvvNM8QH6/3zxAwWDQPDSBQMC4qyuwvOkIViQvAlwYshFQI3JjcshTzTcJ4O41axzbSgY41NJMH2qZXFbGnKVLycjIYOHCNYy/vKetm1xjLfdEIsEf//hHwLrAPHR+3HvvveTkqKByXl4eWVnq5k+e/Ca26mWW428Oytx9Gohwxhk/AuD71HPd228DuCaHrKws0+/vu+8+V+xbjpWRkWEG9qamJlOo67LLLtuVp+phF6AYNZ8XolkQcQjIakjVKWPjpQ0degh2J9bNV1l20WiUDRs2cMklX+D3+3n55U1AGhkZI7Ak0t2L327c2KEx09LSwj6ffcZTvXsb8dN+30iLvnl89tlnnHXWWcaIPPHEE/n+97/PDTfcwOjRoznmmGO+2o69bJSt4fP9CVgMlOtPQgymgWOB84BBPYA4vP7Cv03Wibikxw0dSg6qM9aTDWSRjXqYioBrdSxdxdSrUAGVau66q457743x8ccbgQhZehuJvYdCf6egQBTs4mbg8Pv9xsihHCvmvyHGxRf3A57kgQcONgNIIpEwHA+AK6+8chdcMQ+7G/feey/gdmH7/X7DGg8EAsa4BFixYn8AysreQ00tktAXRIkmbWSqLgU+HFg+cSIHLl68VSqhhGbS09PNcQOBAD179jSfS9glHo8Tj6vR5OGHHzb7aGlpoalJEf4uv/zyr3spPHxJZGRk4Pf7KcLqcIZRq/YSJRdKAGh45nNoF45wegna2tqMwWnrl7g9BmlpaaY/bNq0yRilWVlZZixzTuJtbW0uTlH37t1dhqxsI0av87epqammv8XjcfMsSH0VgfwmkUiY185+3t7j0b59zm2eRz0vQWDOuHGcMW9eB1e8c2P//fdn2bJlW31+8803d7i9ZE/uEJ7OhoXP9xBqQK5GGQLqScwlaqRkGoGNG+QbN4YO/Q1wGlHiWAmYZqKaz/166I/t2NozUYll5cD1AATZRP9ttG8kiwCbLFYEPFFWxlkffADAB8srGDp0vd46jJLlWQgcvJ1z3ggDu+kqRC+STB63zW09dBbkUVYmGVABVKC0HOtzqyVIK2Mcvzhw8eKt9nLzgQdy+b//vbsb62E34Z577gFg5MiRPHDAASbNNYIkk9ow77CFIVISHfu4nZN1NBpl6NCfks0rnAf8bN06/H6/K/tDXm/YsIG3tcds5MiRjBo1yuxnW6JgHaeqWrS1tRnv29KlS1m2bBlXP3sVLGxmw4aoaYMYCW1tbcb4+fzzz/n90KEMR+XU7PvZZ8DW4SHnscRIbmlpoaRkKrlYkz0MnIG9zp4hvQPsyLPRZTvfOdCpjY1Zs2Z9pd9lZGQYK1zihTuCrAwS7R7sjlK4EomEscC3t3/5blsPcGpqqnlAMzMzzWqgozS1WbNmcfTRR+/oNDzsZsycOdMMgs5qm1u2bDGhCrD3PDU1das+9WUg+3HGxAFX4S3po8Fg0GjHgO27zj6amZlpvCIZGRmmbY899hjRqJoUvFDLN4fRqBV5AmVyLgEuqKz80qJYQ4eexDBWcgVwFpA+qhcNKz7f5vZXX307yp8ygS++mLnN7b4W4kBO+nY36dXrWKCSg1CenQQQ0nyLYqDfunU7PEw2tTSQTTVR0lHGxpGTJvHru+/+Oq3fe7Ajzsaeamy89JLSxdt3330ZMmQIgwYtQEUzKxDWfi62kmEz6jqVP/QQ5cDACy4g6RCwGTToEFSVxCA2U9uWZxaIpfyT0lKmAgFWMdzxvUTYw8CJpaU8/9FHgBqwX9WW+DO9e5vt/MApQ4bw1EcfkUwm+egj9QANGfIUMJeHHrqOF88/n1OffFJt73AxnnFGf8jppppZDCw+jmOOeRE4lxdfVBUajz322C9xVT18XTz00EOAmqwlLJKVlWUmhcbGRkPOSiaTNDdbv6R9LcqLoLwaKv9pKmsoBl5DSSYJMbwGmNHO2PjhyJHEcK80pT3Z2dmmPS0tLcZobWpqMv07KyvLGEgmzKdfy/uZM2d6HI/dCLnOmzdv5ligZ47KrhAl0M8++8yMBX6/39xT56IlNTXVvC8rO4XBrOQstKExHKiD97t3Z//PrcHhNIDVCJXgllvKePZZlc0yffp0V8jPudCSsI3P53N9Lv1wy5YtvPDCC6DbD5iuLsdta2tz6X6ozBc1Hs+ngmrWUIJlNZUAeb16UY0STy9HDYmPvfOOMZJvmTRJR6ijrjoyJbj7t4ftYEc6G/ts5zsHOp2xIZhQVkaUC1EyN836X4R8ag3zOIEyAg598kliMXW1qh56iH2BO7VYzkhgGXFUN83Sf/NQvOWtr3AYRa8QmVxZq24BlpELlFC7jTtTghowqlFmkbPGwZAhT+hXFUxlFXUXXMBAx/cnnPAXMOZNf8teNYcKAnkcd5zidSSTnrHxXceJ80/g7ZMWOD6RdRe8++6pejA8h/nDhxME/gtsQgXZAH7/3/+69ve9kSNpIBfIYtq0EwF49NE78dC54Axt9BwNBCF9OcQ3KEZaLBYz3qimpiZXGqlM1hkZGXx/shIezEWNVzmo+T2vXBkts4H99TGdBoKkU7/66iOsXr3aGMl///vfjcjYgQceaLyuTsO2fabJu+++C8BHH31kjpGdnc2gQYPg+iQQ75DY6fP5tMEgTItSaqmglgpqsAR8P2IWwXP6eThr3DiTuRNBjbNR8vFTTx5q3B7Dtj3KHtphbySI/vvf/+Yvxx0HQJQ+WMoUzJ49iunTH6cAm4IOqpuK5T/tgguY/Ze/sGXLFqOd7weCNBChwUh3rdFGQyikLPGWlhaz+qxDze/pqA4+sLWVZDJJj0GDaFjdE6Vf5zfbt7S0mOMPq6nhpaIiU1kgBgwZMoOPPnoeu6KtZDLQ8y9/odDv58lTT0VF5fOxVRzDqqG6VEBN1acUFZXrPaqIvs/3S5LJW7/CVfawsxDpY5/P59JtkfudmZlJerpyE/v9
fhOKi0QiypPwPLT8n005/Oc/m0gkxgPwxRdfmEF/3HvvsXHjRoLXXEPmli1c+fOfm2PJAD18+B+B0xytU89FMpl0tU2QkpJiJqktW7YYVUanHoIzju9cQXft2pW8PGXSe5oeHjzs4dibCKKvv/46AH379nUQPEuBQtau/T9isRhDhpxPLqsoxOaj+9GJpyeeSPSvf6UZ2P+SS/j0ySdFwRaw8jMWDcyr+6cZaJubm41npBFb1vnwe+4hFos5VhRFiHawbJ+VlWUG+bS0NBZjCy5LGGzIkFvMkbOpJwCMvuQSwsDF//oXFwNnHHUUDcYgaYR4HcT9fPTRRlJSAih/iXho1PWR63bYYYft6BJ7+JKYOXOmmcTFKADVVyQ84WT+p6ammu1isZgyRkOvMnlyHQsWKNGdQCDA+vWKKLxhwwZXyur69etpbm52ZQGkpqY6VoOTsSlNlkJ4zjkLgQUMYyX3zZvn0i6QlamT7+HMYpD3oFapEjrp2rUr3bp1A5RBIjyQxx57jMZG5a+74oorvtwF9WAg9z0Wiyl3aAyIqye8ngFMnz4fec7femuEiwwpY9YnkyczXe+vHNUjJEdPRgtncbbU1FTjwZCx68gjT2fJktf55JNPAPj0008pL1d7KS8vZ//9lV9k//33N9ygZDLJ/PnzAXj//ffN/rOysujfX1Ho+/fvz+jR9SgGCmzcmAtAjx49XHy0/fZ7ALV0FK+zH8gxfo0QayhFjbhBff6HH36F0W0GtYSLapWSBoIEWUVQ7629HLqHbWBv9GxcNnSoI0CRx9q1J5t3fZhvpGQCjt80olyP/vPO42XUcFxy6qlmG7mGoqKRxfYRZRhRItR3ePULUd08xPjxP2fhwq1roTyzerUu0hbQrR2IkyMSZTCvsspUdyw/6iiWAA30wRZBCOuzAtgPgLVrr6Vv32uxQ0oY+Ip51R6+IdShso6Gfa29HHDAx1hDN4HqW052UIKVxLlh0iRue+edr3UsD7sfztTU5FM2P0092c36lbq/U6bMBWK8887NPDxuHKeg7JNM7KKrAmVcgBrn/KjRxhnsdXpvlUGqfLejR5/NwoWKj9S9e3fee+89QHGQFi9ezK23nsfMmf+ld+/exGIxmpqa1OcbbuGXPRYbPZmRI0ca43z06J9jw9V5DBwYoKLiMFpaWlzGsAo2h8CYBwL1OqLPIYga1888/HAg2+QTgnoSagkj+qJriJFFLee8/LKpxu1h+9hFjo3vvrHx9NNPM3z4cEpLD2o3JIeMRb9lyxYORXW4dNx0hhrUw7aKbPoQZTKqA4pnQ1JjRdU2D3jr009dq72mpibi8Tjf+96/UWGKOqDRZYWrbYtQQ4PaYzwep6mpySVHrXQ9hE1SgFXbs8bLmwRZwiIjkR5lJO6KS9W6Dels2dLfwUzXEoIALObww8upqHiQp59+mpNPtoaZh68OCZ1kZWXRXcsky/0FpRsgq/vm5mYOuXAqCx5f6KrIuM8++xCJRIADgSozyHfp0oX8/HxA9elwOGz2GYvFDOnNKfusPivWRw87/kX0Z/J3IG/SzI/GjeOOhao9sh9nNoqTOOok7MViMbMSDAQCrpCMZHZJaAWUHLucl+fl+Oo4GBu2BRhALTFqzfcJoIF8xo07lXxsIDWBXUCFgXrySVBPGFvhyenddWL//X+F4kqEgAjjx18FwBtv3LbVtr/85V85//ZbVBdc+ztIgVv/ewu//N71W207fvzv9KsQsIRsGnQ4vBDY2vu6du19jO7bV59jNmqEDyBeuxjKHJHRMw8IEjUq6iD6JFHepFJfmRGsdC1HPewIu8ix8d03Nk45JQr8HZjASpbj9EU488idueiyRQzVrVcxGBhOLTVUsIhSrJ0sFpvqzHmsCikXoNPYkAH49dencdhhUjEwnauuepXbbz/E0VqR6FXWeGtrKy0tLa5iWWlpaYRCKzmsoICVZiJwPvZ+YCBRchy6H6V633V6mwqUzybPCN6UlLyJehjVGWVTS1STr/r06bOjy+xhO3jssccAd9pzly5dzCTrzBRKJpPakIAj/nQ4VG2ktbWVtLQ0M6nn5uaqNOasERCP0NDQAEC3bt2M4dKlSxczuX/xxRe0am6Qz+czE31KSgrJZJKFC1sZPz6VJ56o54wzVqD6h/SVOKq3B3nrrb+RkpJi+oycj+wbcPVX5zlv2rTJVSOjI6SkpJhtgsGgWck+++yzxkV/ySWX7NxF34vRr5/SuNxnn32Yz09QY8ICBvCmZoRZr0QYaCAOFFDPQfydauZSSyHWzKynD5BHAwEaaEYm7EXhVeZeNzc3OwzIU/Q2i1EVrNUC5pBDzgRGMHv2pSxcuNAYotefcgO3zL1ZLYE3YQyNlJQUU5fjkENexhncmEqDTumtpZJa0tPTXW0wXCSUYZRFlARRwriNqAW4i94GUXaPUOnzEG2NelYS09/kcMwxP+ff//7DztyOvR67SK38u29sqE4vgctmrCyX+/Qznn+efWbMcHEhQLp3BLGGa7DWMKiOHEQJgC2rWbkT7Yk42tL+FkhoZPu54wD/qqnh10VFzKWWVcZqB5uCK54P+Seca1CPW5znn/+xY4/HogwQNTBEtTt96NAfkMsqjgUe3oHwjoft44JLzofEavUmaxBvvbp1qeopD0/mqelPqzezm4ElTJq0mIULD9lq20f+/KiezHts9d1XxRNPlNHaWmr4GGlpacaj4qFzQO7X+PF/Q038xagQwBIm69KOMuFWAWoxImZICbWEqaUay0QLoMakHMd7NZaI98lZcI2CE9TwtiEHqCQftQAroIEwteTmXk/Xrl354osvAM19CKtd9uv1CTDJnEdubi5jx56Ju7JLIyUollFQn4vwj6Q9fr+f1NRUwnobqU8VdJzVGgYglejyqTXsjiJ9NdDbx/UVXEkINZekAwHvudhJ7KIK899dY0PU3aD7NrdxCl45XdlfFU5FPbAEIr/f7xJDckJY/R2J7GRnZ+P3+41b2rkC7qjQ0ZdFIBAgMzNzp/d1zz33eGp5XxKPPfbYNlPktlW5sqP74dzGGYJZt26dS39DQhF+v5/cXEWca21tpaGhgbS0NHw+n6nr48xGASsNnZuba7aR34Pqz+KpSCQSJgwpxFPZRjJTUlJSzOtEImH26VR6lO9AeT/kWN27dzfPTFtbm/ECzZw5k/PPP7/D6+mhPYRKnodakZcS1uHVar3FGkYC00BX/qiq+j4DB76CWnRIkrSE1fxYQ2M704dEUDYEsX4DFezIA84cO5bzfvc7929CQNPWu/rZ2LFaXiABJpk/j7DWQioBijUBtSMs0+HvIMqsEAF/hdH6vKtYsnYhALf37Ushdrkn3JQsIJdVNJhlZmSbx/Tgxh7v2Whra+Oqq+aCCZDI6frN++LisYRCy8jMzCQnJ4c3UZ1LtpQgxDDqCVFPAwNoYACLWWPsbKk7sDIU2mZ1xMzMTMeEE9L/3A+rcv9J7ecAH300znzekZSvz+fjxlCIZwsKqCRKjX74KqlllU69tZ4S0c6Tcw+yYMGDpm3p6elQ1BNqJNz
iPPtm8lCP5bQrruC5K67gBM/DsUM88sgjgDIYT7hhhvowodMCCEIQJl9/sKXJxCVsUU3GCRJmqED1kzwmTHiXt99WackpKSkmm6OpqcmIHK1fv96sLrt37+4qHJiRkUF6ejo+n8+EMQ44oJX33lPbz527mfXr1RCblZXlCnWIAZCamuriGYmBHAgEXKnaksqaSCSMIZGent5h+KStrc0YJJs2bTL77NKli3mdTCaNsdGtWzcTlmpsbPT4HB3AirwtBl7CjmjDeYUKBhDVq3pQ/oHh1NVNZPPmzTqTZIL+jSKR96HerPAj+ldZKC6QLKhcY1wYXTa1AmeBhwg2VJGfn288G8lkEhrrgCbXWJefn08QO2pZJkUeC1jDaKCsqgq/w5h3GreqvxVQS5g8GkzTGsy5lyCeiqamJrp06cLP6+pc43hKSgrP9+plCPcNVCIUWaewnodtYxdVmP/uGhsKc7Gs5QiW/CbIc209dcECLpwwwRgSQWwFZEUyAlBciVL9LosvR35ZuvR6Ro06j/argxUrNui2CW9j53BiKMTKggJTEtkPhGmgwSTuStTRSQR0t7igoJdeNIj6KUAjK1bMpKzsWEpRi5VBfggm4C/6YbzEMzp2Dh+KIVGJucehPGgOqFsRX40l5sY5+ug8Zs1qQvUuGRDjTJyo+sXChVtcu7/iipO4++5ndqopK1ZsYOLEl/S7PGDr8Mz2cPjh3fj3vzd+qd94+Obwl7/8xWHUScJqOjaQMI01xAiFHjW/CYfDbNiwgS+++IJJk0Zix0hFVBY5ACHDCzZt2sS4oUMBiHIYa9f+FYBPnqumf/8Iavxd7FT3IUvv7+9nncWBv/yl2dfs2Sv4yU9iRBwHyMzMpBLJDnRO7HnU04cqal1Kp28XFiKByQXAa/X11Nf/j/z8gcTZevS3IW0YOHAK5eX/UnvPy6O+QM0Ci/VvQkAt2YCfXGopAK6fOhWAU/78Z49LtB3skZ6NJ55QKpridj2MBl5nLpazESZfV2JVxkMJBQU3UlPzCxN/W8lIwqjKd5KDbc2CLIQLUaONjSBwm0Nf3ymdK0hNTTUDwJAhd6Im/QhQQ2pqqrakN6EGBpVpMmRII4qkF+LTT3/a4f7FAh8WChHSD4eyvrOxXA0xNsT4gDlzhpnzVR6VJVA1ULcpCEB5+YNEIhFytcRvOhBLWKesh23jkUcecWlozJ37MQApKWlMmlSBMt2aIR6A+EZsZVYQ/vvRRwsNOWDrPzSKlsVCl0DWPfc8C/iIRqOuGjySNijCXCtXfop69GVIzqK1ValETp58I88+ezHg1t8YO1bqP5QChwIDOfzwRubM+djlqRANkLq6OqOzkEgkXFLn4hU56PRJkIA3//YGiUTCZN8AJjyUlpa2ValvUP1VQkVdu3Y12T2eIJgHD99N7HHZKM8++6xJ+2tpaWH5UUdRCsRZxXxCQA752iIFZbMrc6EIv99vSsXPm3c3kyadBShKlR81BetINWrazeNlR451RyJGra2txqXX3NxMWdn9WLNFQhsRxxlsQa18xRsByqYP0K+fkg6vq3ulw3NPTU1lum7PzPx8MMlbeWYf774L8XiL2V7OV7naX9LbhnQbYPjw04AIB2FDRRHHP4BDfD7e8LwbWyElJcXwE3w+n0s+et687zFpUk84uJuy6yq6QVUpbnWXoP5rQy7K2WU1BKSfTZ++BhjAzJmf0b17dzMpf/7554bLkZ6erjlJCaAFjB8saIrzOV9v2rSJgw9eoT8vRT0t0gjV/6dNCwP/AeDtt6cY47V3795UHHUUOQ8+SGpqqjF4AoGA4Xg8dduTylCuryeRSJjnpFu3bmY/ToOnqanJbOPkN2VlZZltHn30Uc4+++zt3Za9AgUFBfToIYRhGUeqUQat+Gn9FBT8Qn8XY9GiawFbv8lKdik/gPhZc/S/BPCbKpVyLZTRKCH69r2fdesuJyUlhY8/3od9913CMC0yCNar4PSfCtLT07fiNsViMVaSywAaUM9CyPFtI7/8+GPS09NN6HA4NmgTAo7Iz+e5qiqO16RY4V9ka1mvKHP1L2LkUmtE55LJpGsEBpks1V5KgPGOcwhdeil4no1tYo/W2bjtqKOMMIvyTkQJ6w4nncdpPBQUzGf16hHm92+99TAAR0+ZQiPqQtWjyGoffPBbF3luZ1BWdgOq+4s7vRD12NoV3f7778f770eQla2CcC2CAHy/sJDZO6hS+NTatSYWOnTocpYuVQPPttTuCgquQj1wESCsH2xb5WUyKrtcoqXCOAFnNNbDl8LwQRj1pEpgdjdYojgYhJqxJp0a4t54ZHU7voMyOiZOfE6/j3P++TnomnsA/PCHuXrncxFNglRg6P77c99995nthFPxz3+eYfrNkUe+juXjF2OVGsL8618hjjpqPMoEHaPb8RLKcV3BMBo0C8DDNwnhCF157rlEGas/HY/Nv6jWnwVx8jEGUE929o2AcD0WoPrMcrI1D0wYZoXYEWnz5s3cUFbGBL3XMCspZyXJ5GWkpqZqj1alq/RDDJsn5wdaHQJc3/veM8B6+vWzJHilhltAjAZyaaDBhBoTDCZKly5dTPp2r369acHmy8hya/Pmzdy3bh1VvXpRpc/aGj8ribDSjLB3jx3LLyrUMSy7zdayytK1UcZgK66gr6xc/3POOQcPbuwxno3nnlMDbkFBAZMmXaw/HQZEGEwto1F0hOE4K6G0t9kjZqW/zz77GAs/ylSiRFi16mlXnQqnoJFoDoC7XkQkEmHo0MccLa1jsC7yFtaadH5sWERZ1RIdDOp/QhpVNvQysjm/Vy/+Wl/vKprklI52ZiHU1BxkrHVnefKMjAxzvuhoZraW7pUpJk9ft0OB4oG6ORUQjluN0TX0wee7mmefncAJJ5yw1b3Z2zBzpiqlHQwGzeq7ra2NSZNEdrkUcqao0pmTIWs/iIsQgDyNkXTz+u9/r6C3LoftxIQJzyLpy7ZXJzj11DhWc6VAb1PBSBoIogbaLVu2sGrVKkCFKs4881IefPAPbNmyBZ/Px+WX/wpFECzW+ynGVpNNEI1Gefrp1zn5jJMgMVlvk4UakoOsZC6lROHCC9nnuedMX0xJSTEhlYyMDGprlbCUZMkAhsAqcEqdyzPZniwt4ZucnBz+9re/AXD66advdc32FihpbXmKJZ1VWMgRAIZRi+TzlDh+O2mSeF+XkM0aouTqffrJpt5MyvetUF6vAqxRWYwyJu7t3ZvL9YIoFFrJrwsKTIBQxMDERzHY1fIC/W17T2nEZJLkofptFvCvUMi11eqESuN1ChsUO76fOBAm1kBBwhobddgRN4Jbfj3iaHMQZWC0D0rLXFIDDMHDtrDHcDZ69uzJlClpqK5ypP5U8TNWsYQCzTkoxQ6/oDrKIhM2SNCv3+N89tlz5OTkGC/A2rWPG6PCaWCA9RT4fD6XRK64oocOfRFrZ+cBxYSppQiV2dGoW/ny1VfzOeJmykdN5RHd2gBOHoWwyePxOFlZWR2m2To/S09PN0ZIVlaWGbDT0tLw+/307v1z1KOrRMCqWWamqt
FoQ2M0yr2RAKqhMm4T4mAEUEjPnj3xYK+9M5tDrRbF4Vps+b81EE9HdYIIDg2takTd9bTTSpk9Ww3cbs+GpCIKYog72A6BpUhRjIA+ZBpQu3o15557pd5GNeavF17IQNRQfxAwn+XgoklLX8zi5JO1PL4fyNJckvgIMGUIq6lhJRVA+gkncOzbbwO4jIguXboYI2rz5s2m727ZssU8P36/33hdGhsbXem0YnA4Rcycab9/+9vf9jqDQ7J1BrDGkSsnvlzF/crVRuexYIyNz2tqTFbFAP4OWNNypfGwFhMljxpWEkaFuC4tLWUyagQAGFMAFMFxFVCPzVS6ft06nu7Vy7RGQhlLsMaGSnEuADL59NPN5pw2bdrESGoNd06MhGvWrXNx11paWngKNQNIgFD6/IayMvapqVEfDIQJH1pavjDkalDjcQBb8DBbGzOhggL8wP8tWkQymWTQuHHE9W8X6/3MAUZlybXy0B57mM5GCMu4BrcOqIXTuioEhrGGlTo1LF+HD3Ztm4L6dR4wnAaghvkU6+PLPPMB6obY5DJZjTjDKaBMAOda5Kujd+9DcRNH86gnnUoWAcpsK85BLXIlbz6uHuh6I/w+EMhiypQ6POrGziCmruNirD0po92HcgH/oz9QvJ7p08UshbffPhSAefN+yKRJv0WZqCrsks0a7TVT3J2oWZP5CaF6UCvqgc3X7vF6TSKu0LlLQXOkBNYsX46arIJYoykBiXC75YrtR3WoySQE/G7iRQAm1drD7oF4gaYBi7WI1jIqgCzWrl3oIty2trayShfsa6qro7W1lY8nTWIyGH3QELDShHkTQIAog4miDD7xBphYvIwVJW5yfDKZ5EQ9cb9eUGCyYk/5xS9MewYOlPpOmcBmV8n4s7Am01S9H6dXt62tjaamJmYCtUa6HCBMjDWkA+du3szGcuiWAwE/lCRkC/WkSc8tAV4vKaEE6K5ruNQAV+nj+nw+uhVBsEYFmuboI63kINc5e3Bjj/BszJo1y+gN7ArIA+kMl8iKzCmlnEgkXIJMAp/Px8aNuz8tcOPGjQQCga2ODerhc4pCOUMtTs/MrsasWbM4+uijd9v+v+u4//77XdV5nRVRdxXkXnaku/JdR/s2i6emZ8+e5hqlpqYaqfbm5mbj8WhqajIr2W7duhnSqcTsBRIa7NGjx14bQw/iDIU2EKaBaX37qrIL7UIPgoxJk1xaoUH9eqw2SitYZGqQfPSRCg2vYixZLDJT+4gPIVDMditRFuiv69p9fuKJM1CmS1fQxxRMRxkFO4r513IaKnAira9jGUtoZD7nAps+/ZSafv2UCJjeIoLyaMhoWYL1fr90wAGAm5fWa1JvyFJtqQRWGt/MdDxsG3uEZyMrK4tJk27ArrqCjm+VSzmI6hxLcDufJf44knrSURZ6Xt4A1q//uEMrNSUlxbgbU1JSzCDnFDlqbW0lFosxadKxKC9Esf5G0lAjhJhPRLdWYoDzUdZfNlGtdVeELQnnxzoQ/UCMUaP+ymef/XIr8RlQE5JTstdpbFypXdfKIh+MeqycLHX7SOeBjT0BVEN1XNa7TpptDVBOVtaJW12zvQnO1VZTU5PJqkhLS2PRIhWcGjs2DPH1UN7TjliKd4n1JFRgU1ODWBmkoDmWuu/NWE9YyCVMD1DJMurJBRLE9F5S9V+ZjOoJAaOJ8n3mG/l8ofAJJKwHqlcITdDv2EbaF0JVq8jWpb6Gg6YQTpy4XH//Kv/735/Mc5OTk+MqeS+fb9iwwfA6mpqazKJin332MaGT9qt157Moxt6f/vQnrrzySvZ0yHVwTuQBVA9pRHE55PrE43FDBn5+6lRKUL0pBzecZMsQtTSQZ+r0wAyW4adQe1HGAxN1t3WGupzF+HqvWAFlZaoSS05OO6XcIGo6SeUvt97K9XfdRSKRMON3GDvWOsfflJQU/H4/NTW/oahoJfYJSLBq1cUMHvwzhgwZw9q1FTyGCiEJz2SEPupo7JMkJFIJFdeSzYkFBVyMMinSEco1WC9zCcnkrli775nYIzwbANm8iR8poS43vxhIGDZ1GDWELyIX6woO00eLz+bh1NH4+hhGg45ayyUWHraaUMR1V4KthNIG5AKrzLZ5HbQnzle9be/36uUiTEnBt4OIasmzBsJY8ygLrDJxBbBEDWQ5QLYOtdg1SAU/nvYgSzvhintX4ZJLbuKhh27dwVYR9S/u4LiE0TNERH8QB6roQ1RPEvm406EFNYjMUr6OxTvN7SKUuBtY6WVZxclwrJQk46jhdwT2SajBDrdLGNsudRDcibp1iAs7T7dJzOhSx1ZLUB2pirMPPpiH/vvfDq+Qh68GSXe9bOFCfjx+POCuhgQx+va9ATiG1auHMnWqykgai7pLQvVVSwc3/8EuQUKGBL969bG0tR3NtCGKGlkFBOPQc/162hyVf8F65PbZZx82oEbBB38ymyeeOJczzugKLKGyshunnbaZRKIry5bl8v7VVzNdtycMjKqpcS0C5bXNfAE1Ouk5oEcAn6+S1at/a9ryuN7XWXrracDQHGhuVL39P6iF2EoXyRbms5gg9Safpwohk9qEztNOG82//12Nh63RqbNRRLyrpKTEDJ551GquMqihsNmsv2KIxV+CtdezqMVPuk5nEjhDDclk0ljQLS0trg7udAvLNk1NTcb16yzlnMD6J7Kww3kQO4WI8bGKcv2uGCsrJmg0e2tpaTErwra2NldlWGedCpODXl/PXK1D4jetkiuhINY9qIdpylwMsWTufW/RPGUKhdjHcJEejsYSZTj2vpxxxhnsDXjM5zM+iAHABRfchJq0S7FF/xK4swMSNkDsR+XNufzKQZTEstPlHQdqzD0++OCjsCnSWSS0USGxZxy7lwknhuIG+XD7ydQ3soVktlQjw+lIfW+Dji2lL8u6NAJUUEs5tTQwGGXqiOHxHwCGsczkRjQCt37ve9zw5ptbhfaccutOsS8hHFoRPHe/b2lpYdOmTYAimjrrEv3xj38E4KqrrsKDBw/fLNroxDobkv2QmppqnMxZQERP7krJP0IpVqszCNRSh5vFH2ANuUCDnnDV8CnGQzKZdLkDnW6/joS8pNBVHEWdtE42K5guLGYh7eVhb0QWMIAGIrxOA/n6F5OxBodq5VtvHUpaWtpWRCxwp8G2tLS43I1Xa5Gn53v0oF6HT2QFEwei5JKrJ64AEE9A3kIoevNNMjMyeBs1DVWboxYBCer09fvBXpaVsgB3rYhsanUYTD6Rv+IPCKt/wusEtiozbKZkoeCFgRh//etlHHzwCXob6fVKbq2BdCJaqIh2W4hHwg+koCS9pM02jiq0vUr9upx8bbqLySCGhhPy+4j+p7yLpbpdAaCCPlqN90jQJcFV/wkB902d6ko5fGrePGNsJJNJo2fTvXt3049jsZhZqWZmZprXX3zxheFLtbW1mf3ss88+nZLj8mXwyCOPUFZWBsCYMaeD1gTqQ1Qvu0AyhaCKtLThiMmZh1rhDxoI+KHwQzU+laMkyBWCQCU1NS+6UuiTySS1Oj12AQ1M1yn58p38l
XuUSCTo9eGH3AG8st/vOOOMz4EvgAoaGqY5ivjlUaG9zmUffkg2yoB0jr/OELK9vzWIb/bDeWtRgUNb+G9ZTQ1FRQeSrknUBcDQhOq7c1Gejwa+j1tFowLwU43kdqm/qu8L0yUElHD44QEefviRvY4ntCN0as+GE8K9kEERwM9KClG50aXIcA0BbW4A1JKP6pgDWUMjdXX/3GVtatZ7PkW/n1gEFEHoqToObmjgirIylpFPmHriwEbUYyHR8TxgRd1SAKLRKEOGiE5DALcD+6thweefm0Hjs88+MymGbW1trgqfTmPmuPHjCQKryMcWtysFaqglQaPDr7Q3wOcrYyzWkxBE9bOo4cAUorxo6bj9RX7YkAdzhqqPGmO45cqHoyLgjajJfwFQyXnnPYg1X7OwKakJIGw+EURQfb4RNcVEUP0sicSbJfQRxFENDgiRzxoTTpPWy7Aq/hQxQMAyf6zvLoj4VZwGShY2pCPTRhhlZABMmvQjFiy4Cw9fDqI1MmbM31EcmQiAVvOJ4FR+hSwGDFiC3L0C9PQsXQl1P6OMRETbpGe1tbVtpfK5YoXy7R1bVsZNDgKzGAOtra2GQ5ZIJMyYsnr1dQwa9BKiWjx27GxUgCIVGESFbq2TJydGRUZGhqtIn+yzrm4KhYVP6pZ93yVJYMeyEpZrY6Mc8MfV3zlAA1NRiztn+K+abK2PpJ4053JVPG+ViNdc7oUHi07N2RCCWEpKCg1k49e8AxnAgqhnR/iN8l0edhVVSxwoYvXqW00pd3DXHElJSTETcfvVUUfZKCkpKfTv35+3a2r4W1GRyUFnGnx2SwgfKtSijlRAlOFaSn0N0MJGBukfrDT7DwaD1NVNBFTeuTDunQ+fsz1OOB9Ev9/v8tJICKawsJBoNGr276ytIb/NyMignkuppwI1NEmEtwA1iIWIAkcccQLz5r26VTv2NPh864Hh1LHSBOUkZGH1JiQsoXgVmO9QnzdGHN/JRABQBP6eOvtZOBQR3MTNPKzRWQ1UGVVDOUINKmn1dfJR9ysAPA80sZH+eqsCbIAkBNQwgKj5VBDCzdWQgdc5gATMljU4gyz19AFgObXGkydXSH5/yqRJ+tUAJky4g3nzriGRSPCjH/yAn6Gmz+hCVQLcWVV2w4YN5vlsampy1YsRgm5mZqbp988999weLj6XQHmVZLKUtbiQf4NY41KZhxI4K6wCIsqrocZIIQIDNPPJJ7/d7pHn1dVt9/uOsRzbZ9wMkyjZ3Ln6PaOvsrNYseKIHWxRaZ6cCG6Oip0tChzb55n/tV+SqMlCcZrM1dCOFedBoVNno0iKobhK/ajHyXlC8pl0Xxm2H6pRyUyBQMBY6T6fz+X2c07izvCEE848b/EG+P1+Q1a6YP16NultNqFc2HLcn8yZw+vTrkI90HnAetQtKQYivP3hh64VhJynM823ra3NDLTOuLfzd87zcsKpdJqZmWkMj9zcXFNEa9OmTeYcy8p8oCvNuImrzvWtevDk3uyp8OWBug7pNGLXNjHzOojVSYnob51ORJnYE45/ebjq4YiwQLwA1UeC2EwRsDoYdUAFhxFlNIotUq23WAK8Th/gOpSPLY7Ke9qINSWc5oIiVCsWiEUEu5pzcqAkF8a5F5VNFQFDjhbVButNKXTsV6ZC22Mi/Otf5/PFF1+oLANstZ6wJj2WLVxoVo8bN26koUGF/bp06WJSYrt37276tPN5cJanP+uss9hTYEOqIdw5SQmsTqb0R/GiqXGwES1uFbJh3rDZs+3d/fufS3W1SifuaEz0+/2O2ipu9VcZH51ja0pKCgP4Ozm6Zat05pRiFdUxgKhrQdU+HOO8r85FkcA59jklDA6j3mUSiAHRoMNB1nCQXhk3VzICRBmGHQvFwAohfT4tTcrXexB0as+GEwOImoQJodT5Hd+HUcPd91asYEQ7F+C3iT6spNYEgMTFJ6vY7wbKyhai1pWyDhUTzklerEQGtNx2OfJ7JDZsRJ1vAVGmahVagRhiMrDLZCs90u94nXC8dxobWXZGN5NDHLcvIAJUkkutKUAf1EeTiaIaUObHBBjYDSLdYEMXlLFRCUA2DXqVloeqIDSAlUQI6+wW3RojXS+BIQm6yJlHkMG6SLckgnKJg8inNzCW16lmmGFUqbb+/oknjEfCrZIKM+fN44pJk0xWBMDj48dz61tv4cEiHo9r76QEtGSijKDuVLX+XIyHHKQvLdfv6rCsDtVD3QEDmXTbL8yU+qetsSOfy3vn5/J7+fw83dLFqGy4Bt2qPtQyELeekRNOY8MpcZ9IJEx7nB4R5z4mO/YTRplcqn8F9SdzUaFLMblrjDdO9fcS1KKwgmzNR8oD1lAJDCceP3er9u7t6LSejTvvvJOJE1VYoX38cHsoKChwdbr2nbg9sQncbHchGck2zjCKM9e/I6vfGTPcZ599TDXObcFJamufs97Ra2ebW1tbXWEgpwdmW0JkTlEleVjl75dFMpnkzjvv5JprrvlKv/ewd6Fnz54mLh+JRIz+g5Sa7wjO/iok0vT0dONVTEtL63Dlm5mZuUfH1OfM+TXTpv0aGyyuIZ9VRmxe/G11RDVfB5bRh5Cuhi3BlQaysYnuYA3IXYvpS5fy/qhRRkjsC8BHi0t4a1cjiL06FShjY41JL1fXqxh39kQQdTXqGYzyGsWA5UbKSwks17OA1wHP2GiPTuvZSE9PN5Pl8OFTGYmyOKuxlVnF2yEusrt1jLd9eMRJIHJmoGxrm44+T01N7TB7xfk6kUgYd6/f7+cdLQ28kPm6ozcCPrJ5kwLg9MJC8oCfLV1Kbm5uh2qUzqwTp8HgbJvzN87QSUfn0/7cu3efgI31C/dA/PvOFXYYCJFNPX69jz0xlPKkKak6HLUeG42tPwLqHkawq0lxVztjwELwFW9GkK08Gz18avcbmvV+6rDFUyRwEWIYUeMjEVWMQtpPCWGgHKokiLgJ+ILD9FAaAMpZxRqTQK5WvPVEqNdu9oNoYDpKDGloD9WE5+LqeVuDGKTiui/W51OJ9eDIuRcDeaxkAYNpIIbic6Smphp+RZcuXUxGSTQa5eijHwYO4t63f+vq32KQBAIB+vbtC6j+LamvzpTv1NRU4zlxfv7AAw9w0UUXsScgGo3S3NzMr888kwHYKy89TGjKzaheGnb9Oo96CrV0vWRClQNzyeZ1tX9HlpoTyWTScMi2bNniyiRyejSckgGC5uZmZo8aZdqVhaKG+rDMkubmZnPvnPtxcun8fr/LsyHtkRo77aGfCADmG20m8V1U8vC//kVzczOh448HVE+uAJaRi3rmg0A5w6g19NlSbHjwrpNP5qSTTurw2Hsr9pBslAQh0EqJechQW6S1AbJQhsj53bszUxsc3zbeKysjC9XF1VRUbyztadhhu2Qbv9/dGG5WlPnYmogSHhCHvTNQpSbKKH5t4u3pEANCtCQENVjtwTCDdbigmgZthoGbdhnEpoh2BDFixHALk69Tu4uxxL4qlPBXPvWMQeWxgOpLfhbxOhGU1kU6sJ5smg1xWQJia0xyuBiTASwDpYEx
wNDx8M8f/YMf/Of/mHA/PAXYla8Q65x0WdlX0PGZfbWts3ZDJgIPHeGvf/0roDyhbW1tRpVHVuUSCPGj7n4Am5ln+5UQR0cDM1DT5mPk8wLHmi0a+FV1tTmuTO6BQMAVrnDyNJxaP05dFEFra6uLZBxA8dpkCdQMvDJ8OMfoCrNgF0MtLS3GCEkkEmbRlUwmTXsCgYDxmDnhDP+5dZxVobqsrCyyjjrKRSJVxtkERA0XFnR4ndF7k/ty3nnnbXX8vRFJOqnOhlPUB4p08qgakHJ1+mUpqlvIGgtseGFbWRtOD4YQkba3vVMGXNJIm5qajJvWadk7ZcOrsbHRSv26BXUhnVN4BKvt4XxI5cGKRCImxt2lS5dt1s3oqGS3U5RM/p7SvXs7sp44Veuwk4eswOOObYodr2O7vQbLtwVZKf3zn5/wgx+MhKyeyskhN+1/RShjo4YBrGQa1mX7gqmwUMzW/A1Zb+rHcUMJ+H2o6xnAGnoJ4yOQ6VelKA4ABlJPI5Us4hj93ZGoxMUgq5jNKqLkk4pa+QutzVJY40Alzz13Lyec8Bsgj48+UllFUn+kFpiQlsa6CZ+R+iv4czLJXXowb2hoYNSoW7BZOMIzkVaCRMhFtRcgRi3Tpt3IG2/cDKgVq4RPVI0URV5paGgwVU1bW1sNEbFbt27Gi5aSkmJCKlJfBdw8kEQiYe6jU+HSgwcPuw878mxsPcN2jG/c2HByEmxCkuLGy2AstirYVdSfevbkivr6HRbGchszlhfi5GO0tra6XIavv67cjRdd1J/bb38RgKOPPtpkj7S2tlJQoFp33tq1RCIRysrOxPo35gEx5rKZPGD22rVm/85jbdy4kVmzZgGq7PZttz3BAw9c7yqA5hxE20/8zhVG+8FWPCl2QotSwXxN+VRpwnbFK6vuZty6lXGXsban4OmnnyYzM5MzzsgASU/243ZUABLQmIbyLMi6vlpfxWVEwBRzT2An50bHPsJ6uSfX2PoBZLXlVO9YZUioQVZRQVwfK4i9K1JEq5U4UVr4OyLvnKC6Wq0cpZ+sXfuyazXqNKzbE/bkN/n5+axf/wCg+l9tbS3HjxlDggbqzbn5TYlzuXRBIM58DjnkNQDeeOMIs3/17MSAGo4++mKeeOI2AM444wFmzfqp6/jSTjHEu3TpYtJjMzMzTTp3PB43xkZaWhr33HMPAJdffjmdETKmyDhTjTJnJUwyHyndHmakNvJqkHRkIUBGeOaZ6znppA+xIb4ajgUuNlt0HHZNSUkxhp+TI9OeN+YMecjri0eNYjrW65JArYBbsVpDAWBzWRlpS5ea0u+yH6eOh3OslL7apUuXDrNRKnGusiWfLAgkyALmTp3KcOwTWYNQnUvBfBM0OWWylwjWR7Ty/PM57NU9XwJgZ7EjzsbOBt2/9TCKDNjZjpSmYtTUKGtDv97y+vx8btWllXv2/A3r1/98l7Xkoove47rrqtm6nFHHWLHiceLxOJmZmZx33nm0tbWZtLydx65xMd/Us6eRt5aBKogajt408ukyRQjNDNRj5sxUcSZN7omYAD0K1ekWY6ksBjEgYcyvQtRVEU9CFfO1IkyeY3vxasgQGMGmxIoxJ0JZSoxIjJhmoJJa6hmBKMsUv3gdAP/1+VTfAn7o9+P3+7n88stJSUkxbl6Jb+8OvLB4MTU1NVw4YwagSIcN5NJAgpDW8pB/DSaQqDQSpk4tQcJR//rXdWzatIlbTBw8n6OPfoBZs/YMvsVXxcMPP0xpqdLTSE1N5cADnwT60Eitg0sjAlUxlrEANXVKCNB6J0866RakOL3KKvsd5wMj9MqjOQRzi4rYb+1alypx+7BIRwR0wKVoDDBq1O0MwC11p+pWKYpoJQkCWC3anueMYvmjy9oVbsMcS9rhbIOzbc7SDlWuX8ewCdiqNWFUmrZ4xMsRgTNhwIAqEWDbH8EtLhlEGYAPP/ww55675xFGX3vtNa688kpaW1s5//zz+dnPfrbd7VvZfjbKd9bYcMYDrc5DnBzsqrwE6NkDaIRwwpKCXgUe7PlLvdX59OyZo38/F3iZXB7hWCRZT3XBH4fddCpgqzCBsu4XcNttQX71q+8BajU8XmsDjBo1yhVWkVCL/JX4o5Mp79TuWLpUqYkuXLjQlfIFVbxz0UUuz0b7tsnDN6N7d0NoGoh9qKqx1SGdUjZiWvQhqgXQnN/IQx/DPmo5QMCl/7Gn4JRTXgF+BOcVqsWNcD+d1lmBD0JjgGnM5lmKUfwJGTTR71+nHFu5RK68U7XC6e2Qz9UEUanDhAVYNc4gUK9qFvOf/9zKoEHK8+Jc/YEafGW1J0W7tjVQb8u7ty2vlfPzlpYW+vRRmQ79+/dnju67VVVVnHji40AODUADIVZSTj4NSKLuIYdIqrUQbBsJBAJ06dLFXMMI9dQSJh6Pk5WV1aGSJGDIovX19S6PR26u0lPIzMw0mjKdHQcemIeUV1f1mOT5zEI9l0WIx8IartLfgvpvIzAbmMMp+hdi/8oouyswatTPgEaE/lyDKlevHqxZQJJaehNgmXl+KIARPx/Jyt+t2NZudwp9+04GhuHUmpY6WqAqvM7VGsD1RrirCLuyiCALLPHIgHtJsKcvt1pbW7n00kv5z3/+Q58+fRgzZgzHHHOMMXw7QqfNRnEWWVJQk5+M/fIJcUgmrBBRBVDP97FS24oUpDLNnwdeZjRuv0QMOCwvj2Xks25duTmuc0LfvHmzrksQwkl7S09P57333gPgk08+Ydo0ddzs7Gyzn22l36alpRlVzzlz5phMlvT0dNe5H0aUgboNtvKh21A5MS+PN8l11bv1o4b3BUA9I4EA86lmALXG0JIJLQ+oda22nUNPBFvRVq23W1paXPUTOjN8PlG2nAEHj4BbIKs3xD/DzodCxxgNvKwcw2sI8nse5CxM6TVArr+E/YJYsa4I7jwS6bXi7QgAxdSjBINqdP6ImCWQYN6877HvvvuaPThFkJyftX8v/SkWi5kJOhaLGaLd1cceSx5w5ZNPkpqaarwhOTk55OWpM+rWrds29RAKCxVPv0+fPsydm8/kyctQvSvEW2/9iilTTsI6pGejBnNJL2w2bb74zTcBOHvzZo488hecdNITPPnkqS7jQc5l8+bNhuPR1tZmwgzOzK6UlBSz2v3rX//a6ch8PXr0wOfzMW5cVyxFUXnVJHOkgRqsym86bo1lCZxK/6sGFjBS19iZAxToDP0waqw4r29f5ldUuAiZHWW7OdP1nWNlU1MTl/I6lVgxsXoOQ3lgClGLPuU2jLDMNaYTpsOQsPO47blosr31xoxGPUsyfsWxQUk/UMBKslD6NKMd1yqiX4uOScSlniNLgpA+ryxgks9njPo9Ce+++y4DBw5kwAA1Fp188sm89NJL2zU2Oq3OhhOLF9/LmDHnAAHiNJhBvRqIxS3FUdKXpHCY/jWqe8ylDy9wKOqxdK7bLXaGO69+edNNVfzqVwO/6il9KRSy45SiCEq8KYi7SqeK3Q5DJTWWANWsYSFrWAzASOop0ceoYhVRszpycq9rUI+a08bfkxD
(base64-encoded PNG data for the notebook's embedded figure output omitted)", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from nilearn.plotting import plot_stat_map\n", + "\n", + "map_path = downloaded_db[\"local_path\"][1]\n", + "plot_stat_map(map_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/.doctrees/nbsphinx/get_data_15_1.png b/.doctrees/nbsphinx/get_data_15_1.png new file mode 100644 index 0000000..dd8e195 Binary files /dev/null and b/.doctrees/nbsphinx/get_data_15_1.png differ diff --git a/.doctrees/participants.doctree b/.doctrees/participants.doctree new file mode 100644 index 0000000..191647b Binary files /dev/null and b/.doctrees/participants.doctree differ diff --git a/.doctrees/processing_pipelines.doctree b/.doctrees/processing_pipelines.doctree new file mode 100644 index 0000000..50936ac Binary files /dev/null and b/.doctrees/processing_pipelines.doctree differ diff --git a/.doctrees/references.doctree b/.doctrees/references.doctree new file mode 100644 index 0000000..54d745b Binary files /dev/null and b/.doctrees/references.doctree differ diff --git a/.doctrees/section8.doctree b/.doctrees/section8.doctree new file mode 100644 index 0000000..ed428c9 Binary files /dev/null and b/.doctrees/section8.doctree differ diff --git a/.doctrees/tasks.doctree b/.doctrees/tasks.doctree new file mode 100644 index 0000000..d6634c2 Binary files /dev/null and b/.doctrees/tasks.doctree differ diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/_images/acquisitions_diagram_release5.png b/_images/acquisitions_diagram_release5.png new file mode 100644 index 0000000..d168131 Binary files /dev/null and b/_images/acquisitions_diagram_release5.png differ diff --git a/_images/blocks_archi_emotional.png b/_images/blocks_archi_emotional.png new file mode 100644 index 0000000..6040761 Binary files /dev/null and b/_images/blocks_archi_emotional.png differ diff --git a/_images/blocks_archi_social.png b/_images/blocks_archi_social.png new file mode 100644 index 0000000..a468b27 Binary files /dev/null and b/_images/blocks_archi_social.png differ diff --git a/_images/blocks_archi_spatial.png b/_images/blocks_archi_spatial.png new file mode 100644 index 0000000..dadf8f8 Binary files /dev/null and b/_images/blocks_archi_spatial.png differ diff --git a/_images/blocks_archi_standard.png b/_images/blocks_archi_standard.png new file mode 100644 index 0000000..d8aa97c Binary files /dev/null and b/_images/blocks_archi_standard.png differ diff --git a/_images/blocks_hcp_emotion.png b/_images/blocks_hcp_emotion.png new file mode 100644 index 0000000..9dee51b Binary files /dev/null and b/_images/blocks_hcp_emotion.png differ diff --git a/_images/blocks_hcp_gambling.png b/_images/blocks_hcp_gambling.png new file mode 100644 index 0000000..7e9a5bc Binary files /dev/null and b/_images/blocks_hcp_gambling.png differ diff --git a/_images/blocks_hcp_language.png b/_images/blocks_hcp_language.png new file mode 100644 index 0000000..67969a7 Binary files /dev/null and b/_images/blocks_hcp_language.png differ 
diff --git a/_images/blocks_hcp_motor.png b/_images/blocks_hcp_motor.png new file mode 100644 index 0000000..565c651 Binary files /dev/null and b/_images/blocks_hcp_motor.png differ diff --git a/_images/blocks_hcp_relational.png b/_images/blocks_hcp_relational.png new file mode 100644 index 0000000..47adb81 Binary files /dev/null and b/_images/blocks_hcp_relational.png differ diff --git a/_images/blocks_hcp_social.png b/_images/blocks_hcp_social.png new file mode 100644 index 0000000..0a75d35 Binary files /dev/null and b/_images/blocks_hcp_social.png differ diff --git a/_images/blocks_hcp_wm.png b/_images/blocks_hcp_wm.png new file mode 100644 index 0000000..fe4b14a Binary files /dev/null and b/_images/blocks_hcp_wm.png differ diff --git a/_images/blocks_rsvp_language.png b/_images/blocks_rsvp_language.png new file mode 100644 index 0000000..79a1f66 Binary files /dev/null and b/_images/blocks_rsvp_language.png differ diff --git a/_images/get_data_15_1.png b/_images/get_data_15_1.png new file mode 100644 index 0000000..dd8e195 Binary files /dev/null and b/_images/get_data_15_1.png differ diff --git a/_images/ibc_bids.png b/_images/ibc_bids.png new file mode 100644 index 0000000..d90885d Binary files /dev/null and b/_images/ibc_bids.png differ diff --git a/_sources/accessibility.rst.txt b/_sources/accessibility.rst.txt new file mode 100644 index 0000000..65393fc --- /dev/null +++ b/_sources/accessibility.rst.txt @@ -0,0 +1,66 @@ +Get the data +============ + +All deliverables of the IBC dataset are open access. Their online +accessibility is described next. + +Raw data +-------- + +The online access of the raw data (*aka* source data) of the IBC dataset +is assured by the *OpenNeuro* repository as well as the *EBRAINS* +platform of the *Human Brain Project* (HBP), in the following DOIs: + +**OpenNeuro** + +- `10.18112/openneuro.ds002685.v1.0.0 `__. + +**EBRAINS** + +- `10.25493/XX28-VJ1 `__ + +- `10.25493/YW4P-3U `__ + +- `10.25493/P21W-NW5 `__ + +- `10.25493/78KJ-603 `__ + +- `10.25493/73GH-KET `__ + +- `10.25493/ZXMK-AH0 `__ + +- `10.25493/Z8J1-1H3 `__ + +- `10.25493/PR7B-HND `__ + +- `10.25493/GDT6-BMK `__ + +- `10.25493/WQAG-ZDZ `__ + +- `10.25493/3JXW-AFS `__ + +- `10.25493/PPE1-XNM `__ + +- `10.25493/PD28-TRA `__ + +Data derivatives +---------------- + +Post-processed data are available in the collections of the NeuroVault repository with the id 6618: https://identifiers.org/neurovault.collection:6618 + +Meta-data +--------- + +Behavioral protocols, video annotations and paradigm descriptors' extraction are available in the public git repository: https://github.com/hbp-brain-charting/public_protocols. + +The scripts used for data analysis are available in the public git repository: https://github.com/hbp-brain-charting/public_analysis_code. + +Data papers +----------- + +All data-descriptor, peer-reviewed articles of the +IBC-dataset—*aka*—data papers, are open access. They contain information about: *(1)* the overall scope of the IBC project; *(2)* demographic data of the cohort; *(3)* description of the experimental procedures undertaken; *(4)* materials and methods used; and *(5)* technical validation of the dataset. + +- The first data paper (`Pinho et al., 2018 `__) is available under the following DOI: `10.1038/sdata.2018.105 `__. In this article, we introduce the IBC project and describe the ARCHI and HCP batteries plus the RSVP Language task. + +- The second data paper (`Pinho et al., 2020 `__) is available under the following DOI: `10.1038/s41597-020-00670-4 `__. 
In this article, we present an extension of the IBC dataset, which comprises the MTT, Preference and TOM batteries as well as the VSTM, Enumeration, Self and *Bang* tasks. \ No newline at end of file diff --git a/_sources/api_install.rst.txt b/_sources/api_install.rst.txt new file mode 100644 index 0000000..108d242 --- /dev/null +++ b/_sources/api_install.rst.txt @@ -0,0 +1,28 @@ +Install data fetcher +==================== + +To facilitate data fetching with minimal coding, we've integrated powerful tools into +this `API `__. + +To install the package containing the API, execute the following command: + +.. raw:: html + +.. code-block:: bash + :name: quick_install + + pip install git+https://github.com/individual-brain-charting/api.git#egg=ibc_api + +This API is under active development, so make sure to update it regularly: + +.. code-block:: bash + :name: quick_update + + pip install -U git+https://github.com/individual-brain-charting/api.git#egg=ibc_api + +EBRAINS access +-------------- + +Note that, in order to use this tool and access IBC data, you need to have an EBRAINS account. +You can register by clicking `here `__. + diff --git a/_sources/behavioral_data.rst.txt b/_sources/behavioral_data.rst.txt new file mode 100644 index 0000000..e472cce --- /dev/null +++ b/_sources/behavioral_data.rst.txt @@ -0,0 +1,269 @@ +Behavioral data +=============== + +For most of the tasks we collect participants' responses, in order to assess their engagement and performance. +We calculate the accuracy of the subjects' responses and we present a brief description of what this accuracy represents. + +MTTWE behavioral data +--------------------- + +If the Cue presented in the given trial hinted at time judgment, participants were to +judge whether the previous Event occurred before the Reference, by pressing the button +of the left hand, or after the Reference, by pressing the button of the right hand. If +the Cue concerned space judgment, the participants were to judge, in the same way, +whether the Event occurred west or east of the Reference. These scores were estimated +considering the answers provided during the Event+Response conditions. + +.. csv-table:: Response accuracy (%) of performance for the MTTWE task + :file: behavioral_data/mttwe_behavioral.csv + :header-rows: 1 + +MTTNS behavioral data +--------------------- + +If the Cue presented in the given trial hinted at time judgment, participants were to +judge whether the previous Event occurred before the Reference, by pressing the button +of the left hand, or after the Reference, by pressing the button of the right hand. If +the Cue concerned space judgment, the participants were to judge, in the same way, +whether the Event occurred north or south of the Reference. These scores were estimated +considering the answers provided during the Event+Response conditions. Chance level was +set at 50%. +*Note:* Low scores for sub-15 relate to loss of behavioral data during acquisition time +in the MTTWE and MTTNS tasks. + +.. csv-table:: Response accuracy (%) of performance for the MTTNS task + :file: behavioral_data/mttns_behavioral.csv + :header-rows: 1 + +TheoryOfMind behavioral data +---------------------------- + +Participants were to judge whether a statement about the story previously displayed was +true or false by pressing with the index or middle finger, respectively. The chance +level was 50%. + +
.. csv-table:: Response accuracy (%) of performance for the TheoryOfMind task + :file: behavioral_data/tom_behavioral.csv + :header-rows: 1 + +VSTM behavioral data +-------------------- + +Participants were to remember the orientation of the bars from the previous sample +and answer with one of the two possible button presses depending on whether one of the +bars in the current display had changed orientation by 90° or not, which was the case +in half of the trials. For each level of numerosity, scores in every run are related to +the trials referring to visual stimuli matching the specified numerosity. The chance +level was 50%. + +.. csv-table:: Response accuracy (%) of performance for the VSTM task + :file: behavioral_data/vstm_behavioral.csv + :header-rows: 1 + +Enumeration behavioral data +--------------------------- + +Participants had to remember the number of the bars that were shown right before and +answer accordingly, by pressing the corresponding button. The number of bars presented +in the visual stimuli ranged from 1 to 8. For each level of numerosity, scores in every +run are related to the trials referring to visual stimuli matching the specified +numerosity. The chance level was 12.5%. + +.. csv-table:: Response accuracy (%) of performance for the Enumeration task + :file: behavioral_data/enumeration_behavioral.csv + :header-rows: 1 + +Self behavioral data +-------------------- + +During the trials of the *encoding blocks*, participants had to press a specific button +depending on whether or not they believed the adjective on display described someone +(i.e. self or other, respectively for self-reference encoding or other-reference +encoding conditions). During the trials of the *recognition block*, participants had +to answer in the same way, depending on whether or not they believed the adjective had +been presented before. +*No. of trials* refers to the number of trials only for the recognition phase in the +specified run and, thus, not to the total number of trials in the run. Because run 3 +was longer than the remaining ones, the number of trials for the recognition phase was +therefore greater. The chance level was 50%. + +.. csv-table:: Response accuracy (%) of performance for the Self task + :file: behavioral_data/self_behavioral.csv + :header-rows: 1 + +MathLanguage behavioral data +---------------------------- + +Subjects were presented with a series of facts (geometrical, arithmetical, +general knowledge, nonsense sentences, etc.) and were asked to indicate whether the +presented fact was true or false. Subjects were instructed to consider nonsense as false. +Scores were calculated based on the number of correct responses. When there was no +answer for a given trial, it was considered a wrong answer, and when the subject +answered more than once per trial, the first answer was considered. Since this is a +"true or false" task, the chance level was 50%. + +.. csv-table:: Response accuracy (%) of performance for the MathLanguage task + :file: behavioral_data/mathlang_behavioral.csv + :header-rows: 1 + +SpatialNavigation behavioral data +--------------------------------- + +Subjects were positioned at a given intersection in a virtual city and were asked to +point in the direction of a key building by rotating their point of view on a +360-degree panorama. 
Scores were narrowed down to whether the subject pointed to the +correct cardinal direction, i.e. whether their error was within 45 absolute degrees of the +correct direction, and the number of correct responses was counted. The chance level +was then 25%. This was decided because subjects were only instructed to point to +the location of the building, with no explicit precision requirement. + +.. csv-table:: Response accuracy (%) of performance for the SpatialNavigation task + :file: behavioral_data/spatialnavigation_behavioral.csv + :header-rows: 1 + +EmoMem behavioral data +---------------------- + +Subjects were asked to press a button when they thought of a link or "story" between +two images. The score is calculated as the number of responses in a run, which, if the +subject was attentive, should be equal to the number of trials. The chance level is 50%. + +.. csv-table:: Response accuracy (%) of performance for the EmoMem task + :file: behavioral_data/emomem_behavioral.csv + :header-rows: 1 + +EmoReco behavioral data +----------------------- + +Participants were instructed to press a specific button when the face corresponded to a +man, and a different one when it corresponded to a woman. Their responses were collected and +the score was calculated as the number of correct responses, with a chance level of 50%. +Missed responses were considered incorrect. + +.. csv-table:: Response accuracy (%) of performance for the EmoReco task + :file: behavioral_data/emoreco_behavioral.csv + :header-rows: 1 + +StopNogo behavioral data +------------------------ + +Participants were presented with color-coded arrows. If the arrow was green, they were +instructed to press a button, and if it was red, they were instructed not to respond. +The difficulty came when the arrow started out green but turned red after a few milliseconds, +and the subject had to withhold their response. The score was calculated as the number +of trials in which they succeeded in withholding their response. + +.. csv-table:: Response accuracy (%) of performance for the StopNogo task + :file: behavioral_data/stopnogo_behavioral.csv + :header-rows: 1 + +Catell behavioral data +---------------------- + +Subjects were presented with four images in a row, and were asked to identify the +oddball by pressing the corresponding button. The score was calculated as the number of +correct responses, with a chance level of 25%. + +.. csv-table:: Response accuracy (%) of performance for the Catell task + :file: behavioral_data/catell_behavioral.csv + :header-rows: 1 + +FingerTapping behavioral data +------------------------------ + +Subjects were asked to press a button with their right hand, either with a specific finger +or with one they chose themselves within a set of selected fingers. The score was +calculated as the number of correct responses, meaning the number of times the subject +pressed the correct button on specified trials plus the times they pressed a button +within the selected fingers on choice trials. The chance level was 25%. + +.. csv-table:: Response accuracy (%) of performance for the FingerTapping task + :file: behavioral_data/fingertapping_behavioral.csv + :header-rows: 1 + +VSTMC behavioral data +---------------------- + +Participants had to indicate the direction of motion of a set of dots by pointing a +probe in the corresponding direction. 
Subjects could make 360-degree rotations of the +probe, and a response was considered correct if the final angle was within 45 +absolute degrees of the correct direction. The score was calculated as the number of +correct responses. + +.. csv-table:: Response accuracy (%) of performance for the VSTMC task + :file: behavioral_data/vstmc_behavioral.csv + :header-rows: 1 + +RewProc behavioral data +----------------------- + +Participants were tasked with choosing between two presented figures. Depending on +their choice, they would have a higher or lower probability of increasing their virtual +reward. The score was determined by the number of responses in a run, reflecting their +level of attentiveness. The chance level is set at 50%. + +.. csv-table:: Response accuracy (%) of performance for the RewProc task + :file: behavioral_data/rewproc_behavioral.csv + :header-rows: 1 + +NARPS behavioral data +--------------------- + +Subjects were instructed to either accept or reject a gamble, indicating high or low +confidence by pressing the corresponding button. The score reflects the level of +attention of the subject and was calculated as the number of responses made during a +run, excluding any missed responses. The chance level is set at 50%. + +.. csv-table:: Response accuracy (%) of performance for the NARPS task + :file: behavioral_data/narps_behavioral.csv + :header-rows: 1 + +FaceBody behavioral data +------------------------ + +Subjects were instructed to press a button every time an image repeated as a mirrored +image (a flipped 1-back task). The score was calculated based on the number of correct +responses. Missed responses were counted as incorrect. + +.. csv-table:: Response accuracy (%) of performance for the FaceBody task + :file: behavioral_data/facebody_behavioral.csv + :header-rows: 1 + +Scene behavioral data +--------------------- + +Subjects had to judge whether Escher-like scenes were possible or impossible. +Additionally, there were "dot" trials, where they had to indicate whether the dot +appeared on the right or left side of the screen. The score was determined by the +number of scenes they judged correctly, plus the number of dots correctly located. +Missing responses were counted as incorrect, with a chance level of 50%. + +.. csv-table:: Response accuracy (%) of performance for the Scene task + :file: behavioral_data/scene_behavioral.csv + :header-rows: 1 + +ItemRecognition behavioral data +------------------------------- + +Participants were tasked with memorizing a target and then indicating whether a probe was the +same as the target. The score was calculated as the number of correct decisions. +Missed responses were marked as incorrect, and the chance level was 50%. + +.. csv-table:: Response accuracy (%) of performance for the ItemRecognition task + :file: behavioral_data/itemreco_behavioral.csv + :header-rows: 1 + +VisualSearch behavioral data +---------------------------- + +In the VisualSearch task there were two types of trials. On the *visual search* trials, +participants had to indicate whether the target was present or absent in an array of +items. On the *working memory* trials, they had to indicate whether a probe was present +in a previously shown set of items. The score was calculated as the sum of correct +responses in both types of trials. Missing responses were marked as incorrect, and the +chance level was 50%. + +.. 
csv-table:: Response accuracy (%) of performance for the VisualSearch task + :file: behavioral_data/vswm_behavioral.csv + :header-rows: 1 + diff --git a/_sources/contact.rst.txt b/_sources/contact.rst.txt new file mode 100644 index 0000000..1d6c088 --- /dev/null +++ b/_sources/contact.rst.txt @@ -0,0 +1,23 @@ +Contact Us +========== + +If you have any questions, comments, or concerns regarding tasks, code, implementation details, etc, or if you have ideas for collaborations, please contact us. We appreciate all feedback and we will be happy to help and collaborate. + + +Get in touch with the IBC team +-------------------------------- + +`Bertrand Thirion `__ is the leader of the IBC project. +Send an email for any general question about the project: firstname.lastname@inria.fr + +Questions on IBC protocols? +---------------------------- + +If you encounter issues with the IBC protocols, you also have the option of opening an issue on the IBC protocols `GitHub repository `__. +We will get back to you as soon as possible. + +Comments on IBC documentation? +------------------------------- + +We are always looking for new ways to improve the IBC documentation. If you have any comments or suggestions, please open an issue on the IBC documentation `GitHub repository `__. +All the feedback is welcome! \ No newline at end of file diff --git a/_sources/data_hosting.rst.txt b/_sources/data_hosting.rst.txt new file mode 100644 index 0000000..2983eaf --- /dev/null +++ b/_sources/data_hosting.rst.txt @@ -0,0 +1,38 @@ +Data hosting +============ + +EBRAINS +------- + +The primary hosting facility for the IBC project is `EBRAINS `__. +The dataset is published in the EBRAINS `Knowledge Graph `__, a platform for sharing and accessing neuroscience data. +Please note that to access the data, you must create an account. You can register for an account by clicking `here `__. + +The IBC dataset is available in the Knowledge Graph as a collection of instances, each of which represents a different aspect of the dataset: + +Raw fMRI data +------------- + +The most recent version of the raw fMRI data can be accessed by following this `link `__. +This collection contains high resolution raw fMRI data, along with Diffusion Weighted Imaging (DWI) and various structural MRI data (T1-weighted, T2-weighted and FLAIR imaging). +Additionally, task-specific details are provided for each task within the dataset. + +Preprocessed fMRI data +---------------------- + +The preprocessed fMRI data repository can be accessed by this `link `__. +This collection also contains task-specific information and subject-specific confounds and event-related log files. + +Statistical contrast maps +------------------------- + +Derived statistical contrast maps have been released `here `__. +These maps are provided in both volume and surface space formats, with contrast labels corresponding to those outlined in the documentation. + +Other platforms +--------------- + +- **OpenNeuro:** Click `here `__ to access raw and preprocessed fMRI files, task-specific information and event related log files. + +- **NeuroVault:** Click `here `__ to access statistical contrast maps for different tasks included in IBC. 
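+
+As an illustration only (and not an official part of the IBC tooling), the NeuroVault
+collection referenced above can also be fetched programmatically. The collection id 6618
+comes from the links above; relying on ``nilearn`` and its ``fetch_neurovault_ids``
+function here is an assumption of this sketch, not a prescribed workflow.
+
+.. code-block:: python
+
+    # Hypothetical sketch: download the IBC statistical maps hosted in the
+    # NeuroVault collection mentioned above (id 6618) and plot one of them.
+    # nilearn is assumed to be installed; fetching the full collection may
+    # take a while.
+    from nilearn.datasets import fetch_neurovault_ids
+    from nilearn.plotting import plot_stat_map
+
+    ibc_maps = fetch_neurovault_ids(collection_ids=[6618])
+
+    # `images` lists the local paths of the downloaded NIfTI files.
+    plot_stat_map(ibc_maps.images[0], title="IBC contrast map")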
+ diff --git a/_sources/dwi_acquisitions.rst.txt b/_sources/dwi_acquisitions.rst.txt new file mode 100644 index 0000000..e2564eb --- /dev/null +++ b/_sources/dwi_acquisitions.rst.txt @@ -0,0 +1,104 @@ +Diffusion-weighted Imaging +========================== + +Acquisition parameters +~~~~~~~~~~~~~~~~~~~~~~ + +Three types of diffusion sequences were employed in three different +sessions, respectively: + +- High-resolution (1.3mm isotropic, 60 directions) acquisitions with + :math:`B=1500` or :math:`B=3000`. + +.. _higresdiff: + +.. table:: Acquisition parameters for high-resolution diffusion imaging. + + ========================= =========================== + Parameter Value + ========================= =========================== + *Sequence* diff_dw60_TE76 + *TR* 7000 ms + *TE* 76 ms + *Flip angle* 90 deg + *Refocusing flip angle* 180 deg + *FOV* 240 mm + *Slice thickness* 1.30 mm + *Number of slices* 112 slices + *GRAPPA iPAT* 2 + *Multiband accel. factor* 2 + *Echo spacing* 0.71 ms + *BW* 1598 Hz/Px + *Phase partial Fourier* 6/8 + *b-values* [1500, 3000] s/mm\ :sup:`2` + ========================= =========================== + +- Multi-shell (1.3mm isotropic, 20 directions) acquisitions for + multiple B-values ranging from 300 to 3000 in steps of 300. + +.. _multishelldiff: + +.. table:: Acquisition parameters for multi-shell diffusion imaging. + + ========================= ============================================ + Parameter Value + ========================= ============================================ + *Sequence* diff_dw26_TE76 + *TR* 7000 ms + *TE* 76 ms + *Flip angle* 90 deg + *Refocusing flip angle* 180 deg + *FOV* 240 x 240 mm + *Matrix* 128 x 128 + *Slice thickness* 1.30 mm, 112 slices, 1.30 mm isotropic + *Multiband accel. factor* 2 + *Echo spacing* 0.71 ms + *BW* 1598 Hz/Px + *Phase partial Fourier* 6/8 + *b-values* [0, 300, 600, 900, 1200, 1500, + \ 1800, 2100, 1400, 2700, 3000] s/mm\ :sup:`2` + ========================= ============================================ + +- Two low-resolution acquisitions (2mm, 20 directions) used for screening. + +.. _screeningdiff: + +.. table:: Acquisition parameters for screening. + + ========================= ========================= + Parameter Value + ========================= ========================= + *Sequence* diff_screening_2mmiso + *TR* 9000 ms + *TE* 66,00 ms + *Flip angle* 90 deg + *Refocusing flip angle* 180 deg + *FOV* 240 x 240 mm + *Matrix* 128 x 128 + *Slice thickness* 2 mm isotropic, 70 slices + *Multiband accel. factor* 1 + *Echo spacing* 0,54 ms + *BW* 2192 Hz/Px + *Phase partial Fourier* 6/8 + *b-values* 0, 1500 s/mm\ :sup:`2` + ========================= ========================= + +.. table:: + + ========================= =========================== + Parameter Value + ========================= =========================== + *Sequence* diff_dw20_MB + *TR* 5700 ms + *TE* 79,40 ms + *Flip angle* 90 deg + *Refocusing flip angle* 180 deg + *FOV* 240 x 240 mm + *Matrix* 160 x 160 + *Slice thickness* 1,5 mm isotropic, 94 slices + *Multiband accel. 
factor* 2 + *Echo spacing* 0,65 ms + *BW* 1838 Hz/Px + *Phase partial Fourier* 6/8 + *b-values* 0, 1500 s/mm\ :sup:`2` + ========================= =========================== \ No newline at end of file diff --git a/_sources/dwi_processing.rst.txt b/_sources/dwi_processing.rst.txt new file mode 100644 index 0000000..16236cd --- /dev/null +++ b/_sources/dwi_processing.rst.txt @@ -0,0 +1,62 @@ +DWI preprocessing pipeline +========================== + +DWI preprocessing +----------------- + +The DWI data were preprocessed using *MRtrix3* (`Tournier et al., 2019 `__) +and *FSL* (`Smith et al., 2004 `__). The images were first denoised using the +Marchenko-Pastur PCA method (`Veraart et al., 2016 `__, `Cordero-Grande et +al. 2019 `__) implemented with the MRtrix :code:`dwidenoise` function. Then, to +correct the distortions due to inhomogeneities of the magnetic field, +FSL’s *topup* (`Andersson, Skare, and Ashburner 2003 `__) and *eddy* +(`Andersson and Sotiropoulos 2016 `__) correction were used. The *topup* +method estimates the susceptibility-induced distortions of the subject's +head from the pairs of images with opposite distortion patterns (because +of acquisition with opposite phase-encoding directions - +anterior-to-posterior and posterior-to-anterior). This was followed by +*eddy* correction that corrects for eddy current-induced distortions, +which are a consequence of rapid switching of the diffusion gradients. +No bias field correction was done. + +.. _subsubsec:fodtract: + +Fiber orientation density estimation and tractography +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +From this preprocessed data, the response functions (required for fiber +orientation density estimation) for each of white matter, grey matter, +and cerebro-spinal fluid tissue types were estimated using :code:`dwi2response +dhollander` the MRtrix implementation of the Dhollander algorithm +(`Dhollander et al., 2019 `__). These derived response functions were then +used to estimate the amount of diffusion in three orthogonal directions +(known as fiber orientation density estimation) using multi-shell +multi-tissue constrained deconvolution method implemented under +:code:`dwi2fod` in MRtrix. + +Then to seed the streamlines from the grey matter-white matter interface +in the next step, a mask of this grey matter-white matter boundary was +first generated using the high-resolution segmented T1 image with the +:code:`5tt2gmwmi` function in MRtrix. Finally, using this grey matter-white +matter boundary mask and the estimated white-matter fiber orientation +density, the second-order integration over fiber orientation +distributions (iFOD2) method (`Tournier et al., 2010 `__) was used to estimate +the streamline tracts. For this, the MRtrix function :code:`tckgen`, was used +to generate :math:`10^{7}` streamlines with a maximum length of 250 mm +and the fiber orientation density amplitude cut-off set at 0.6. + +.. _subsubsec:strucconn: + +Structural connectivity estimation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +These streamlines were then warped into the MNI152 space using *ANTs* +(`Avants et al., 2009 `__) image registration described +`here `__. +The structural connectivity matrix was then calculated for the warped +streamlines in MNI space for 400 parcels of the Schaefer atlas (`Schaefer et al., 2018 `__) using :code:`tck2connectome` from MRtrix. 
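Putting these steps together, the fiber-orientation-density, tractography and connectome stages can be scripted around the MRtrix3 commands named above. The following is a minimal sketch only: all file names are illustrative placeholders, the preprocessed DWI, the 5TT-segmented T1 and the Schaefer-400 parcellation image are assumed to exist already, and the ANTs-based warp of the streamlines to MNI space is omitted.

.. code-block:: python

   # Minimal sketch of the FOD-estimation / tractography / connectome steps,
   # driving the MRtrix3 command-line tools from Python. File names are
   # placeholders, not the paths used in the actual IBC pipeline.
   import subprocess

   def run(cmd):
       """Run one external command and raise if it fails."""
       subprocess.run(cmd, check=True)

   # Tissue response functions (Dhollander algorithm)
   run(["dwi2response", "dhollander", "dwi_preproc.mif",
        "wm_response.txt", "gm_response.txt", "csf_response.txt"])

   # Multi-shell multi-tissue constrained spherical deconvolution
   run(["dwi2fod", "msmt_csd", "dwi_preproc.mif",
        "wm_response.txt", "wm_fod.mif",
        "gm_response.txt", "gm.mif",
        "csf_response.txt", "csf.mif"])

   # Grey matter-white matter interface mask from the segmented T1 (5TT image)
   run(["5tt2gmwmi", "5tt_t1.mif", "gmwmi_mask.mif"])

   # iFOD2 probabilistic tractography: 10^7 streamlines, 250 mm maximum
   # length, FOD amplitude cut-off 0.6 (values taken from the text above)
   run(["tckgen", "-algorithm", "iFOD2",
        "-act", "5tt_t1.mif", "-seed_gmwmi", "gmwmi_mask.mif",
        "-select", "10000000", "-maxlength", "250", "-cutoff", "0.6",
        "wm_fod.mif", "tracks.tck"])

   # SIFT2 streamline weights, then the Schaefer-400 connectivity matrix
   # scaled by the inverse node volumes
   run(["tcksift2", "-act", "5tt_t1.mif",
        "tracks.tck", "wm_fod.mif", "sift2_weights.txt"])
   run(["tck2connectome", "tracks.tck", "schaefer400_parcels.mif",
        "connectome.csv",
        "-tck_weights_in", "sift2_weights.txt", "-scale_invnodevol"])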
Each value in this +connectivity matrix was the sum of the contribution (SIFT2 weights +(`Smith et al., 2015 `__) calculated using :code: `tcksift2`) of each streamline +(between any two given parcels) to the overall fiber orientation density +and was normalized by the volume of the two parcels (using parameter +:code:`-scale_invnondevol` with :code:`tck2connectome`). \ No newline at end of file diff --git a/_sources/experimentaldesign_diagrams.rst.txt b/_sources/experimentaldesign_diagrams.rst.txt new file mode 100644 index 0000000..ba793e1 --- /dev/null +++ b/_sources/experimentaldesign_diagrams.rst.txt @@ -0,0 +1,74 @@ +Experimental-design diagrams +============================ + +.. figure:: protocol_description/blocks_archi_standard.png + :alt: **Fast event-related design of the ARCHI Standard task.** + :name: fig:blocks_archi-std + + **Fast event-related design of the ARCHI Standard task.** + +.. figure:: protocol_description/blocks_archi_spatial.png + :alt: **Block-design of the ARCHI Spatial task.** + :name: fig:blocks_archi-spa + + **Block-design of the ARCHI Spatial task.** + +.. figure:: protocol_description/blocks_archi_social.png + :alt: **Block-design of the ARCHI Social task.** + :name: fig:blocks_archi-soc + + **Block-design of the ARCHI Social task.** + +.. figure:: protocol_description/blocks_archi_emotional.png + :alt: **Block-design of the ARCHI Emotional task.** + :name: fig:blocks_archi-emo + + **Block-design of the ARCHI Emotional task.** + +.. figure:: protocol_description/blocks_hcp_emotion.png + :alt: **Block-design of the HCP Emotion task.** + :name: fig:blocks_hcp-emo + + **Block-design of the HCP Emotion task.** + +.. figure:: protocol_description/blocks_hcp_gambling.png + :alt: **Block-design of the HCP Gambling task.** + :name: fig:blocks_hcp-gambling + + **Block-design of the HCP Gambling task.** + +.. figure:: protocol_description/blocks_hcp_motor.png + :alt: **Block-design of the HCP Motor task.** + :name: fig:blocks_hcp-motor + + **Block-design of the HCP Motor task.** + +.. figure:: protocol_description/blocks_hcp_language.png + :alt: **Block-design of the HCP Language task.** + :name: fig:blocks_hcp-lang + + **Block-design of the HCP Language task.** + +.. figure:: protocol_description/blocks_hcp_relational.png + :alt: **Block-design of the HCP Relational task.** + :name: fig:blocks_hcp-relational + + **Block-design of the HCP Relational task.** + +.. figure:: protocol_description/blocks_hcp_social.png + :alt: **Block-design of the HCP Social task.** + :name: fig:blocks_hcp-social + + **Block-design of the HCP Social task.** + +.. figure:: protocol_description/blocks_hcp_wm.png + :alt: **Block-design of the HCP Working-Memory task.** + :name: fig:blocks_hcp-wm + + **Block-design of the HCP Working-Memory task.** + +.. figure:: protocol_description/blocks_rsvp_language.png + :alt: **Block-design of the RSVP Language task.** + :name: fig:blocks_rsvp-lang + + **Block-design of the RSVP Language task.** \ No newline at end of file diff --git a/_sources/get_data.ipynb.txt b/_sources/get_data.ipynb.txt new file mode 100644 index 0000000..e6e70a2 --- /dev/null +++ b/_sources/get_data.ipynb.txt @@ -0,0 +1,1069 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Get the data\n", + "\n", + "This is a simple guide on how to download the data using [this API](https://github.com/individual-brain-charting/api). 
You can also find the reference for the API [here](https://individual-brain-charting.github.io/docs/ibc_api.html).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Import the fetcher as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[siibra:INFO] Version: 0.4a47\n", + "[siibra:WARNING] This is a development release. Use at your own risk.\n", + "[siibra:INFO] Please file bugs and issues at https://github.com/FZJ-INM1-BDA/siibra-python.\n", + "[siibra:INFO] Clearing siibra cache at /home/himanshu/.cache/siibra.retrieval\n" + ] + } + ], + "source": [ + "import ibc_api.utils as ibc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To see what is available for a given data type on IBC, we need to fetch the file that contains that information.\n", + "The following loads a CSV file with all that info as a pandas dataframe and\n", + "saves it as ``ibc_data/available_{data_type}.csv``.\n", + "\n", + "Let's do that for IBC volumetric contrast maps.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "db = ibc.get_info(data_type=\"volume_maps\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's see what's in the database.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
subjectsessiondeschemitaskdirectionrunspacesuffixdatatypeextensioncontrastmegabytesdatasetpath
00100preprocNaNArchiSocialapMNI152NLin2009cAsymNaNNaN.jsonfalse_belief-mechanistic0.000552volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
10100preprocNaNArchiSocialapMNI152NLin2009cAsymNaNNaN.nii.gzfalse_belief-mechanistic2.896178volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
20100preprocNaNArchiSocialapMNI152NLin2009cAsymaudioNaN.jsonfalse_belief-mechanistic_audio0.000543volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
30100preprocNaNArchiSocialapMNI152NLin2009cAsymaudioNaN.nii.gzfalse_belief-mechanistic_audio2.893414volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
40100preprocNaNArchiSocialapMNI152NLin2009cAsymvideoNaN.jsonfalse_belief-mechanistic_video0.000543volume_mapssub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d...
................................................
532191540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_correct-dot_correct0.000570volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532201540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_impossible_correct0.000618volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532211540preprocNaNSceneffxMNI152NLin2009cAsymincorrectNaN.jsonscene_impossible_incorrect0.000614volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532221540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_possible_correct-scene_impossible_correct0.000598volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
532231540preprocNaNSceneffxMNI152NLin2009cAsymcorrectNaN.jsonscene_possible_correct0.000597volume_mapssub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx...
\n", + "

53224 rows × 15 columns

\n", + "
" + ], + "text/plain": [ + " subject session desc hemi task direction run \\\n", + "0 01 00 preproc NaN ArchiSocial ap \n", + "1 01 00 preproc NaN ArchiSocial ap \n", + "2 01 00 preproc NaN ArchiSocial ap \n", + "3 01 00 preproc NaN ArchiSocial ap \n", + "4 01 00 preproc NaN ArchiSocial ap \n", + "... ... ... ... ... ... ... .. \n", + "53219 15 40 preproc NaN Scene ffx \n", + "53220 15 40 preproc NaN Scene ffx \n", + "53221 15 40 preproc NaN Scene ffx \n", + "53222 15 40 preproc NaN Scene ffx \n", + "53223 15 40 preproc NaN Scene ffx \n", + "\n", + " space suffix datatype extension \\\n", + "0 MNI152NLin2009cAsym NaN NaN .json \n", + "1 MNI152NLin2009cAsym NaN NaN .nii.gz \n", + "2 MNI152NLin2009cAsym audio NaN .json \n", + "3 MNI152NLin2009cAsym audio NaN .nii.gz \n", + "4 MNI152NLin2009cAsym video NaN .json \n", + "... ... ... ... ... \n", + "53219 MNI152NLin2009cAsym correct NaN .json \n", + "53220 MNI152NLin2009cAsym correct NaN .json \n", + "53221 MNI152NLin2009cAsym incorrect NaN .json \n", + "53222 MNI152NLin2009cAsym correct NaN .json \n", + "53223 MNI152NLin2009cAsym correct NaN .json \n", + "\n", + " contrast megabytes \\\n", + "0 false_belief-mechanistic 0.000552 \n", + "1 false_belief-mechanistic 2.896178 \n", + "2 false_belief-mechanistic_audio 0.000543 \n", + "3 false_belief-mechanistic_audio 2.893414 \n", + "4 false_belief-mechanistic_video 0.000543 \n", + "... ... ... \n", + "53219 scene_correct-dot_correct 0.000570 \n", + "53220 scene_impossible_correct 0.000618 \n", + "53221 scene_impossible_incorrect 0.000614 \n", + "53222 scene_possible_correct-scene_impossible_correct 0.000598 \n", + "53223 scene_possible_correct 0.000597 \n", + "\n", + " dataset path \n", + "0 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "1 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "2 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "3 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "4 volume_maps sub-01/ses-00/sub-01_ses-00_task-ArchiSocial_d... \n", + "... ... ... \n", + "53219 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53220 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53221 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53222 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... \n", + "53223 volume_maps sub-15/ses-40/sub-15_ses-40_task-Scene_dir-ffx... 
\n", + "\n", + "[53224 rows x 15 columns]" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are over 26000 statistic maps (half of the rows because there are .json files corresponding to each map) available for download.\n", + "But since it's a pandas dataframe, we can filter it to get just what we want.\n", + "Let's see how many statistic maps are available for each task.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "task\n", + "Audio 5852\n", + "MathLanguage 5760\n", + "ArchiStandard 3588\n", + "RSVPLanguage 3458\n", + "MTTNS 1824\n", + "MTTWE 1824\n", + "Audi 1800\n", + "SpatialNavigation 1728\n", + "ArchiSocial 1404\n", + "Self 1320\n", + "Visu 1152\n", + "BiologicalMotion2 1100\n", + "VSTMC 1100\n", + "BiologicalMotion1 1100\n", + "HcpWm 1092\n", + "ArchiSpatial 1092\n", + "ArchiEmotional 1092\n", + "FaceBody 945\n", + "RewProc 918\n", + "HcpMotor 858\n", + "MVEB 792\n", + "DotPatterns 726\n", + "NARPS 720\n", + "Scene 693\n", + "Attention 660\n", + "EmoReco 660\n", + "WardAndAllport 660\n", + "TwoByTwo 660\n", + "MCSE 648\n", + "Moto 648\n", + "SelectiveStopSignal 528\n", + "StopNogo 462\n", + "Lec1 432\n", + "MVIS 432\n", + "EmoMem 396\n", + "VSTM 360\n", + "FingerTapping 330\n", + "HcpEmotion 312\n", + "HcpGambling 312\n", + "HcpLanguage 312\n", + "HcpRelational 234\n", + "HcpSocial 234\n", + "PreferenceFaces 222\n", + "EmotionalPain 216\n", + "Enumeration 216\n", + "PreferenceHouses 216\n", + "PainMovie 216\n", + "Lec2 216\n", + "TheoryOfMind 216\n", + "PreferenceFood 216\n", + "PreferencePaintings 210\n", + "Stroop 198\n", + "Catell 198\n", + "StopSignal 198\n", + "ColumbiaCards 192\n", + "Bang 144\n", + "Discount 132\n", + "Name: count, dtype: int64" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "db[\"task\"].value_counts()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can find the descriptions of all these tasks [here](https://individual-brain-charting.github.io/docs/tasks.html).\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For this example, let's just download the maps from Discount task, only for sub-08. You can filter the maps for tasks and subjects like this.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 12 files for subjects ['08'] and tasks ['Discount'].\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
subjectsessiondeschemitaskdirectionrunspacesuffixdatatypeextensioncontrastmegabytesdatasetpath
256240827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.jsonamount0.000503volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256250827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.nii.gzamount2.921305volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256260827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.jsondelay0.000505volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256270827preprocNaNDiscountapMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.923846volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256280827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.jsonamount0.000504volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256290827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.nii.gzamount2.925251volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256300827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.jsondelay0.000506volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256310827preprocNaNDiscountffxMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.925747volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256320827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.jsonamount0.000503volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256330827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.nii.gzamount2.921803volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256340827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.jsondelay0.000505volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
256350827preprocNaNDiscountpaMNI152NLin2009cAsymNaNNaN.nii.gzdelay2.920833volume_mapssub-08/ses-27/sub-08_ses-27_task-Discount_dir-...
\n", + "
" + ], + "text/plain": [ + " subject session desc hemi task direction run \\\n", + "25624 08 27 preproc NaN Discount ap \n", + "25625 08 27 preproc NaN Discount ap \n", + "25626 08 27 preproc NaN Discount ap \n", + "25627 08 27 preproc NaN Discount ap \n", + "25628 08 27 preproc NaN Discount ffx \n", + "25629 08 27 preproc NaN Discount ffx \n", + "25630 08 27 preproc NaN Discount ffx \n", + "25631 08 27 preproc NaN Discount ffx \n", + "25632 08 27 preproc NaN Discount pa \n", + "25633 08 27 preproc NaN Discount pa \n", + "25634 08 27 preproc NaN Discount pa \n", + "25635 08 27 preproc NaN Discount pa \n", + "\n", + " space suffix datatype extension contrast megabytes \\\n", + "25624 MNI152NLin2009cAsym NaN NaN .json amount 0.000503 \n", + "25625 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.921305 \n", + "25626 MNI152NLin2009cAsym NaN NaN .json delay 0.000505 \n", + "25627 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.923846 \n", + "25628 MNI152NLin2009cAsym NaN NaN .json amount 0.000504 \n", + "25629 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.925251 \n", + "25630 MNI152NLin2009cAsym NaN NaN .json delay 0.000506 \n", + "25631 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.925747 \n", + "25632 MNI152NLin2009cAsym NaN NaN .json amount 0.000503 \n", + "25633 MNI152NLin2009cAsym NaN NaN .nii.gz amount 2.921803 \n", + "25634 MNI152NLin2009cAsym NaN NaN .json delay 0.000505 \n", + "25635 MNI152NLin2009cAsym NaN NaN .nii.gz delay 2.920833 \n", + "\n", + " dataset path \n", + "25624 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25625 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25626 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25627 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25628 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25629 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25630 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25631 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25632 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25633 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25634 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... \n", + "25635 volume_maps sub-08/ses-27/sub-08_ses-27_task-Discount_dir-... " + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "filtered_db = ibc.filter_data(db, task_list=[\"Discount\"], subject_list=[\"08\"])\n", + "filtered_db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we are ready to download the few selected maps that we filtered.\n", + "\n", + "The following will save the requested maps under\n", + "``ibc_data/resulting_smooth_maps/sub-08/task-Discount`` \n", + "(or whatever subject you chose). And will also create a local CSV file ``ibc_data/downloaded_volume_maps.csv`` to track the downloaded files. 
This will contain local file paths and the time they were downloaded at, and is updated everytime you download new files.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 12 files to download.\n", + "***\n", + "To continue, please go to https://iam.ebrains.eu/auth/realms/hbp/device?user_code=UFKZ-XXQU\n", + "***\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[siibra:INFO] 139625 objects found for dataset ad04f919-7dcc-48d9-864a-d7b62af3d49d returned.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ebrains token successfuly set.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "Overall Progress: 0%|\u001b[32m \u001b[0m| 0/12 [00:00\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
(HTML rendering of the download log omitted here: the same 12 rows with local_path and downloaded_on columns follow as plain text)
\n", + "" + ], + "text/plain": [ + " local_path \\\n", + "0 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "1 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "2 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "3 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "4 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "5 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "6 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "7 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "8 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "9 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "10 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "11 ibc_data/volume_maps/sub-08/ses-27/sub-08_ses-... \n", + "\n", + " downloaded_on \n", + "0 2023-10-05 17:23:53.472528 \n", + "1 2023-10-05 17:23:53.628380 \n", + "2 2023-10-05 17:23:53.634523 \n", + "3 2023-10-05 17:23:53.793226 \n", + "4 2023-10-05 17:23:53.799418 \n", + "5 2023-10-05 17:23:53.972341 \n", + "6 2023-10-05 17:23:53.979429 \n", + "7 2023-10-05 17:23:54.140314 \n", + "8 2023-10-05 17:23:54.146809 \n", + "9 2023-10-05 17:23:54.304385 \n", + "10 2023-10-05 17:23:54.310566 \n", + "11 2023-10-05 17:23:54.468429 " + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "downloaded_db = ibc.download_data(filtered_db)\n", + "downloaded_db" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's try plotting one of these contrast maps" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhsAAADJCAYAAACKTvCwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAAsTAAALEwEAmpwYAAC6eElEQVR4nOydeXxU1fnGv5NkkpAhZgiEQAIkIAEMRBZBNhGwuKB16w/3fa/71s22WrdWW1tt1daNurburQtWrKUqBUEEgQhGIRETSUbCYJg4zEAySeb3xznvOfeGsKigBu7z+UBmuXPvufeee8573vd5n9eXTCaTePDgwYMHDx487CakfNsN8ODBgwcPHjzs2fCMDQ8ePHjw4MHDboVnbHjw4MGDBw8edis8Y8ODBw8ePHjwsFvhGRsePHjw4MGDh90Kz9jw4MGDBw8ePOxWeMaGBw8ePHjw4GG3wjM2PHjw4MGDBw+7FZ6x4cGDBw8ePHjYrfCMDQ8ePHjw4MHDboVnbHjw4MGDBw8edis8Y8ODBw8ePHjwsFvhGRsePHjw4MFDJ0ckEmHGjBkMGTKE/fbbj4ULF7q+v+OOOxgxYgQjRoxg2LBhpKam0tDQ8I21z+dVffXgwYMHDx46N8466ywmTZrE+eefT3NzM/F4nGAw2OG2s2bN4q677uKNN974xtrnGRsePHjw4MFDJ0ZjYyMjRoxgzZo1+Hy+HW5/6qmnMnXqVC644IJvoHUKnrHhwYMHDx48dGIsX76cCy+8kNLSUsrLyznggAP405/+RCAQ2GrbeDxOnz59qKqqIjc3d4f7PuKII9iwYcM2v+/RowevvfbaDveTtsMtPHjw4MGDBw/fWbS0tLB06VLuuecexo4dy5VXXsntt9/OLbfcstW2s2bNYuLEiTtlaABs2FDPkiVvbvP70aOn7dR+PIKoBw8ePHjw0InRp08f+vTpw9ixYwGYMWMGS5cu7XDbp59+mlNOOeVL7L0N2LKdfzsHz9jw4MGDBw8eOjF69epF3759WbVqFQD//e9/KS0t3Wq7xsZG5s6dy7HHHvsl9t4KNG7n387BC6N48ODBgwcPnRz33HMPp512Gs3NzQwYMIBHHnmE+++/H4Af/vCHALzwwgscdthhHXI5to0kX8aDsS14BFEPHjx48PC1cdVVVwHwxz/+8Vtth4ddi9GjB7NkyYPb+f5alixZssP9eJ4NDx48ePDwtbF8+fJvuwkedgt2jWfD42x48ODBw25EcXExc+bM+bab4eFLori4mC5dutC1a1d69erF2WefzaZNm77tZn0LaAUi2/m3c/CMDQ8ePHjw4KEDzJo1i02bNrF8+XKWLVvGbbfd9m036VvArslG8cIoHjx48ODBw3bQq1cvDj/88L00VCTZKF8PnmfDgwcPHjx42A5qa2uZPXs2AwcO/Lab8i3A82x48ODBgwcPuw3HHXccPp+PTZs2ccghh3DTTTd92036FiCcja8Hz7PhwYMHDx48dIAXX3yRaDTKW2+9xUcffbTdGiF7LjwFUQ8ePHjw4GG3Y/LkyZx99tn86Ec/+rab8i1g12SjeGEUDx48eNjNSCQSbNliV4FpaWmkpXnDb2fCVVddRXFxMeXl5QwfPvzbbs43iF1DEPV6uwcPHjzsZhx55JGu97/4xS+49dZbv6XW7BiPPPIIAIFAgFtOOgkAP5DQ3//iqac4+eSTt7uPp59+2shiH3PMUmbNOgBQJc5jsRgA55xzzq5v/G5CXl4eZ555JjfffDP/+Mc/vu3mfIOQMMrXg2dsePDgwcNuRHV19bfdBA9fAR3dt/vuu++bb8i3jl1DEPWMDQ8ePHjYC/Hiiy9SUFAAgM/nI5FQfoubJk4kBISBOBAlX/+iGFDbn3LKI2b7M844w7XfJ554AoAfn3km9ZwIDAeCHH30+3qLcrJ5lmnAO+eeSxioBB5+912kVFcoFOK4447b1afs4SthR3Llvp3ai2dsePDgwcNehP/+978A9OvXz/BG/H4/L5SWEgbSUUZGPblAETBB/3IaUAjUAUvIyMgA4N5776W5uZna2lp8Ph8ZGRn0P+kkjgUeZDYqAFOCDcJEyDGvlFGTAM448ECECXEs8F8t8f69731vl18DD18GLWzfs9Ftp/biGRsePHjwsJfiEU10LAbyUJN+DFhDPsrImAacrzaenq4sgyUFQBUnnfQ0zzzTMW8jov/mE6WeMBB0fJugEeXNSOhtE+228AOTpk1jnldT5juAJND0tffiGRu7AF5pZQ/fBLx+5uHr4tlnn2XAgAEAXHnAAZToz2Ooyd6SQItRxsYMOC9dbTQBeAmUxVEBVNK1a1c2bdpEPB4nmUySlpbG+yedRAhYDNQzGGXGBPRRAIJEGclK/VohTAM1FBAFIARUA2XTpvHsM89w4okn7toL4eFLYEecjV47tRfP2NgF2Dv18j180/D6mYevinvvvReAkpISkskkBx54MgOwU32B/puOGBvNKKOiBpb0VF9GgJebgZnk8wjFwMlHHcVDTz9txK5SUlL4NQP03nJQYZd0va8Idkd5KIOmCJVWWQHECWljYzHK8AkC3XNyTPsvu+yyr3spPHxpdIJslOLiYmbOnMm0adN252E87GIUFxdTX19PamoqXbt25YgjjuDee++la9eu33bTPHjwsEuQxxqaaabWfBJDTfsR0K8WANVQrv0f5VnAy5zIIqYBNahQyNa4Qv2OCv13MbnUG8MmAjQwFshCGSIRFA8kYsyRchRvJAtIP+IIslAG0XOXX84JmkTq4RtCG7C59WvvxvNseOgQs2bNYtq0aaxbt47DDz+c2267jV//+tffdrM8ePCwE3jttdeITp8OKGrmSOBlYODs2Rx44E9RHoUiaokDUEsEFbyIoKZ4P1DOF1+8Smurmmh+360bk1EsDoC7gQYGkJ2dTWpqKoAmnE5A+UfmMJWVjNFHO3XjRgDO7daNFwwttA5llFSRS5QGcgFoIEGIKH6sh2M4yhdS51PZD0Eg4Bkeux9JLLf3a8AzNjxsF3t3aWUPHjoXXnzxRQAeP/54k0PinCd+PH06yvQo0f/y9DdZestKvvjiGnw+m87Y0tICwA1A+kAgCM1LlGkCA0lPTyc1NRWfz6d/lwVE6MNKZgBH6k9kP49Ho2bfyWQSn89HdvZoGvA72uOnwWwVp5YQsIYxjnPJAqp9Ppa/8IKXJrs70QbaJv1a8GqjeNgu9u7Syh48ePCwlyOJovBs699OwvNseOgQXmllDx46Fx555BF69+7N9OkPko8lfzbr1zlAKbCSKv1KPgEYiOJPVLq8GoAR7wIgBLEq+A+K0QHFbN68mba2NgD9txwoN+m0WaiFcYbeT2ZmptmdPdYpqIBJQL+XvBgJtVSxkjAVRKlwtM0PFB5/PI88/HCnkj7vVNhFng3P2PDQIV588UWmTZvG3LlzOfXUU9mwYQPBYPDbbpYHDx7a4dFHHwXg3HNTUDPDaOqpZjHLADUhp2PzPwYTZZXRCPWrnfi7KdtjQynO5Wpra6sxNl4F4nFlSswFFjESKOWYY/
py110fA2I8PAUsJqG3q9FHulTvp7W11XA8AK0aeiw2NbY9KlCE1QIWs8oYUTFgBMpMKjj3XB7VhsvZZ5+9M5fNw87C42x4+CbgLK0s8WAPHjx8FzETRc70A4Xag6FULfyobI4YMm9EcOWTJEohPR1y0tmWb/xu/Ys1DNbHGY2ibbafiRaQTQOLyGeRGDME+F3RVerIkUc62Lv4QKS6aDPKRBIeh/q3jBDpOj02C2U8Ffuhmx+OPOccXn2ko317+FpoY9t24JfAbjc2vNLKnR97b2llDx6+23j66acpKirir4ccwvHAHOYTZSxqslYTfUi/iuBUu4ij0lLL9Z5KIDKCaP0m1/5bWloMsfNNDkLllRSr7RmOMmFqyMzMJCUlRXss/EQZCSYPRcIhfrNPp2fD5/MRjeaQnR3Czmph/dePMkBiZh8R/U2NPoOShAq+BIAx55zD05mZO6xI6+FLoLN4NjpbaWUPW2PvLa3swcN3Ew8//DAAgwYNokuXLhyJMiqygCoWOTwY6q9M3Tb0nnD8A4i4JpSkI6V0332v1K9K9RFiKB5FEcrjECMjI4OUFMk3KEF5PiajjIVKlGlgDyD7d/NDIo7XImQe138rgWoG0OAKo9SggixFjrbXn3IKD8fVmZ577rl4+JroDJ4Nr7Ry54RXWtmDh86D7qNHU4AyKAqxwuASkLjnk084un9/QJkKWUCDCUJIllkJ0Qa3V8NCwioRlC8hhE1Rlc8Vli+vBo5HhVgG6m1FfFyhR48T+OKLl7Y6SjQ6nuzsKv3Or88iZvaRTz0Fuv3OLcJYZRBRCsnexpl4+AroLJ4NDx48ePCwayGl4bt06UIBaoL1o8wC8VeIifADbWjI58oIKUaFQbQaR04hoIyNZDJpskt69BiKFTMXH0kI5XEIISXUtmz5TP8mgA2vSLm1Sv07MVCKzf5TUlLaeTec1Vpsa53tf6imBoC7iooM00P8IHK0sQUFeNhF8LJRPHjw4GHvw6OPPkpRURGgJmuhUAawJdJEnxPgo40bDW+ud+9BKGNAyJ3FAERrN7lCJ83NylTJpp6o3kYZEAXYGrFj9D78wIcA7LffPnz4oaTTVjtaEzDHggKz/8zMTFdIJRpVr7Nze0KiBAkOffbZf8z2F3VTJc3/sHYtS/r2NR6OEMrQCAGXavXU6x95xMtO+bpo40vpaWwLnrHhwYMHDx48eOgYnmfDgwcPHvY+BAIBo32xYdQoMw/EUH6EZQxDeR7UN1JQEWDjxk/p1u02lEeiBHKUiFYyGTXhjGQyyaZNKqQSpQ8wXh9hMrYQfRAohSIfhMDv9+Pz+XSmoaSwisJGAOURsfwQ2X9mZqbruPI62rCJlpYg3bo1s3HjrSaDMZlMGrLr4337UqKPIAGbGlQ4JUdvc/c553ieja8Lj7PhwYMHD3sPROemd+/e3D9pEgAzUHkhMtkuIx8V3shB+A6tra1mslZ/m4E40WgOyWSU9nBO+orTcax66Z+ibAabxarjNcu54IIe+sNW7JQfRmllFKAyWSS8UoLPF+/gWO4slZSUFBobT3JkuahzeZN81RzqTWXYMFCFCtw4K6wEHNfNq5/yFbGLslG82igePHjw0Ilwy6RJRuZKaJqyqlfTqwTYVc3Ubt1Ocv2+sfEmGhu/v5NHK0V5QYYbZ4jhi9YkIf4f4DFUTdnPgSiWOSFaH5IrImoYefQfWvxlTtmgW7ffo0q7Hcnr9GEB6rwl30W4KnKkLFRROg9fA+LZ2Na/nYTn2fDgwYOHToBAIGBeOzU0xNFQBBzEGspZQ5Rh2Dojda79OD0FgtTUVJMhArDPPvsA8MEHJzB0qA5cxLpZN0JNDCVG/jyqSoof+ALoqvcgBoY7a0WhABI9zTGcbZBy9ttqp90HQClhainFTZBNYBfi0oqbjz+e47xy9F8NHmfDgwcPHvYOPPzww3Tv3p2Zxx3nSAZVTgYpJT8ctcpfAMxlpQiRAzA9O5t/NTZuYwLHZWj4fD6j8Jmfnw+8ob6oGaQiM6GNKENjCcqLkdAtqQW6oNwffrRVgmKSSHIq6rtECamp/V0hFGcbOmpfTs7PcZsSYYKoHBc5WgRbCwaUARJEGSMPP/zwHi3ytWXLFg4++GCamppoaWlhxowZ2yyg+Y9//IMZM2awePFiRo8evf0dS9XXrwnP2NgG7rvvPvr06QOoB04ehFAoxA9+8IPt/vaf//ynyYNPSUmhvr4eUOXaL7744t3Yag8ePOzJiGBrgoAyNAL/B5TCoAQMqoDJL6tJVmSzKsFVKfXLYt26gwHo1Ws1xPOw6axZKBOnFGVg1KA8G6L8IWofogIaAmDDhkO+YskKIZwqYyOblRTpo4nDRbw8lh2iPD4BYPF558EebGxkZGTwxhtv0LVrVxKJBAcddBDTp09n3Lhxru2i0Sh/+tOfGDt27M7t2PNs7Hrceeed9OjRA4ADDzyQ7GylQzd48PVIsHL16kN56623AJgyZYrr9/J5WVkZgwZN0p8WsGrV0+pVQQGPP/44ABs2bOCaa67ZjWfjwYOHzo4HH3wQgH333Zc/TptmpvVp+vvAaP3mSNQSfjmk+2H0P1SVVoB6BiDT77a8B21tbVuVlgdFKBXS5rp1OSQSMfr2rUL5CkborWKo6fwpQPbRiDIuGlm7di5+v994S5yGRjKZNPt3kkWTHYY8RFE0AroFQawnQ4yOQpSKCMDQLJS1kQ5F5fZ6XnjhhR1eh84Mn89nso4SiQSJRKLDe3r99dfz05/+lDvuuGPnduxlo+x+DB78sn51JIrdHWfQoFeBCG++ObnD30ydOgeYo98FgHQGD74cgCVLfrNb2+vBg4c9C0OGDAHgtMmTGYFKPp0AFPfQG5RgaQwODmYC8SOASjnd2lWelpZmjA8nV8KJlJQUE3qRdNu1a7+nDI6sMWrGDwGJ9UAm0OT4dTMQo2/fUwiFnsXv95t9bsvokWOlpKSYAnCCxsYXSSaTbN68GYDjevc24ZM4NsG2FBgkua/i2gC6hSHjoovoP3duh8feE9Da2soBBxxAVVUVl1566Vbei6VLl7J27VqOOuqonTc2OkNtlO8y7rzzTvNaLO68vDz23XdfAEaPvsuxdQLVnaXscR5Tp87h8cfX0tLSwrp16/D5fEyd+jiwnFyWkQXUkouttwijRx/F22//E1BW6J/+9CfA/aB73o69DzNnzgTU6tLZFzIyMgBIT08nLS2N9evXk0wmzfbgThWUwfmHP/zhN9V0Dx48fIeQmprK8uXLiUQiHH/88axcuZJhw4YBany55pprePTRR7/cTj3PxpfDnXfeaVxKaWlpdOnSBVADubCie/ZUDOkZZWUMcPxWahuGUZSoKCcCefTs2ZMjjvgIaKGsLAtYzAAdR4wDjTQQNQI3MICoOVa/fv3IzMzkxBNLgBh/+cv7ANxzzz1m0kgmk57xsYfi/vvvB1RtC8ky8Pl8ZvU3Y0Y9sA6rIFDHyJGZtLS0kJmZCajVXzAYNPuRlec//vEPPv/8c2DPdBfvLfjb3/5GSYmqFVJLLhNooAQodpI2slBrG
SFlVAOVytkg4leRyLOAO2QBqtR7R4TR9uEMWYxlZGQYMS7IUwOjeFbK8wA/Q4c28sEHQWwdFaW1kZGRYfbTfv8dufpbWlq2Iq0KZOz+dyRCa2srd3bvDlgx9XSgWbNo04W/qgvHDATSMjL429/+xumnn77VcfcUBINBpk6dymuvvWaMjWg0ysqVK034f926dRxzzDG8/PLL2yeJtuJ5NnY1Vk+ZQhgllBN2fF6Aep5ODIXYsmULAwY8DkzmiCP8wGYgkxUrIkCO8WT+PaScmAUFhyHc6GaizCgr4/kVK9x7z+nJJddNVG/jcOdvnV4VD3sTZsyQdMB9UbNIDWpmibFs2UygpeMfetjj0Lt3bwCuGzeOAVh5CxcSwAb9Og5UQOTVjRzn83Fsh2XcrTestbXVTOhOw6O1tdUYvU6kpqaaxdLGjfvQbQxqEi8AIj5YBx980B1lgeQhOh8bN56Fz+dzcTNM8xMJY4S0tbV1yN/w+XwdZq2I8NePNm7kp926GbtLirEBDF9dTyQ/34h8paOiAnJt9ySEw2H8fj/BYJDNmzfzn//8h5/+9Kfm+5ycHDZs2GDeT5kyhd///vc7l43ieTa2jzvvvNM8QH6/3zxAwWDQPDSBQMC4qyuwvOkIViQvAlwYshFQI3JjcshTzTcJ4O41axzbSgY41NJMH2qZXFbGnKVLycjIYOHCNYy/vKetm1xjLfdEIsEf//hHwLrAPHR+3HvvveTkqKByXl4eWVnq5k+e/Ca26mWW428Oytx9Gohwxhk/AuD71HPd228DuCaHrKws0+/vu+8+V+xbjpWRkWEG9qamJlOo67LLLtuVp+phF6AYNZ8XolkQcQjIakjVKWPjpQ0degh2J9bNV1l20WiUDRs2cMklX+D3+3n55U1AGhkZI7Ak0t2L327c2KEx09LSwj6ffcZTvXsb8dN+30iLvnl89tlnnHXWWcaIPPHEE/n+97/PDTfcwOjRoznmmGO+2o69bJSt4fP9CVgMlOtPQgymgWOB84BBPYA4vP7Cv03Wibikxw0dSg6qM9aTDWSRjXqYioBrdSxdxdSrUAGVau66q457743x8ccbgQhZehuJvYdCf6egQBTs4mbg8Pv9xsihHCvmvyHGxRf3A57kgQcONgNIIpEwHA+AK6+8chdcMQ+7G/feey/gdmH7/X7DGg8EAsa4BFixYn8AysreQ00tktAXRIkmbWSqLgU+HFg+cSIHLl68VSqhhGbS09PNcQOBAD179jSfS9glHo8Tj6vR5OGHHzb7aGlpoalJEf4uv/zyr3spPHxJZGRk4Pf7KcLqcIZRq/YSJRdKAGh45nNoF45wegna2tqMwWnrl7g9BmlpaaY/bNq0yRilWVlZZixzTuJtbW0uTlH37t1dhqxsI0av87epqammv8XjcfMsSH0VgfwmkUiY185+3t7j0b59zm2eRz0vQWDOuHGcMW9eB1e8c2P//fdn2bJlW31+8803d7i9ZE/uEJ7OhoXP9xBqQK5GGQLqScwlaqRkGoGNG+QbN4YO/Q1wGlHiWAmYZqKaz/166I/t2NozUYll5cD1AATZRP9ttG8kiwCbLFYEPFFWxlkffADAB8srGDp0vd46jJLlWQgcvJ1z3ggDu+kqRC+STB63zW09dBbkUVYmGVABVKC0HOtzqyVIK2Mcvzhw8eKt9nLzgQdy+b//vbsb62E34Z577gFg5MiRPHDAASbNNYIkk9ow77CFIVISHfu4nZN1NBpl6NCfks0rnAf8bN06/H6/K/tDXm/YsIG3tcds5MiRjBo1yuxnW6JgHaeqWrS1tRnv29KlS1m2bBlXP3sVLGxmw4aoaYMYCW1tbcb4+fzzz/n90KEMR+XU7PvZZ8DW4SHnscRIbmlpoaRkKrlYkz0MnIG9zp4hvQPsyLPRZTvfOdCpjY1Zs2Z9pd9lZGQYK1zihTuCrAwS7R7sjlK4EomEscC3t3/5blsPcGpqqnlAMzMzzWqgozS1WbNmcfTRR+/oNDzsZsycOdMMgs5qm1u2bDGhCrD3PDU1das+9WUg+3HGxAFX4S3po8Fg0GjHgO27zj6amZlpvCIZGRmmbY899hjRqJoUvFDLN4fRqBV5AmVyLgEuqKz80qJYQ4eexDBWcgVwFpA+qhcNKz7f5vZXX307yp8ygS++mLnN7b4W4kBO+nY36dXrWKCSg1CenQQQ0nyLYqDfunU7PEw2tTSQTTVR0lHGxpGTJvHru+/+Oq3fe7Ajzsaeamy89JLSxdt3330ZMmQIgwYtQEUzKxDWfi62kmEz6jqVP/QQ5cDACy4g6RCwGTToEFSVxCA2U9uWZxaIpfyT0lKmAgFWMdzxvUTYw8CJpaU8/9FHgBqwX9WW+DO9e5vt/MApQ4bw1EcfkUwm+egj9QANGfIUMJeHHrqOF88/n1OffFJt73AxnnFGf8jppppZDCw+jmOOeRE4lxdfVBUajz322C9xVT18XTz00EOAmqwlLJKVlWUmhcbGRkPOSiaTNDdbv6R9LcqLoLwaKv9pKmsoBl5DSSYJMbwGmNHO2PjhyJHEcK80pT3Z2dmmPS0tLcZobWpqMv07KyvLGEgmzKdfy/uZM2d6HI/dCLnOmzdv5ligZ47KrhAl0M8++8yMBX6/39xT56IlNTXVvC8rO4XBrOQstKExHKiD97t3Z//PrcHhNIDVCJXgllvKePZZlc0yffp0V8jPudCSsI3P53N9Lv1wy5YtvPDCC6DbD5iuLsdta2tz6X6ozBc1Hs+ngmrWUIJlNZUAeb16UY0STy9HDYmPvfOOMZJvmTRJR6ijrjoyJbj7t4ftYEc6G/ts5zsHOp2xIZhQVkaUC1EyN836X4R8ag3zOIEyAg598kliMXW1qh56iH2BO7VYzkhgGXFUN83Sf/NQvOWtr3AYRa8QmVxZq24BlpELlFC7jTtTghowqlFmkbPGwZAhT+hXFUxlFXUXXMBAx/cnnPAXMOZNf8teNYcKAnkcd5zidSSTnrHxXceJ80/g7ZMWOD6RdRe8++6pejA8h/nDhxME/gtsQgXZAH7/3/+69ve9kSNpIBfIYtq0EwF49NE78dC54Axt9BwNBCF9OcQ3KEZaLBYz3qimpiZXGqlM1hkZGXx/shIezEWNVzmo+T2vXBkts4H99TGdBoKkU7/66iOsXr3aGMl///vfjcjYgQceaLyuTsO2fabJu+++C8BHH31kjpGdnc2gQYPg+iQQ75DY6fP5tMEgTItSaqmglgpqsAR8P2IWwXP6eThr3DiTuRNBjbNR8vFTTx5q3B7Dtj3KHtphbySI/vvf/+Yvxx0HQJQ+WMoUzJ49iunTH6cAm4IOqpuK5T/tgguY/Ze/sGXLFqOd7weCNBChwUh3rdFGQyikLPGWlhaz+qxDze/pqA4+sLWVZDJJj0GDaFjdE6Vf5zfbt7S0mOMPq6nhpaIiU1kgBgwZMoOPPnoeu6KtZDLQ8y9/odDv58lTT0VF5fOxVRzDqqG6VEBN1acUFZXrPaqIvs/3S5LJW7/CVfawsxDpY5/P59JtkfudmZlJerpyE/v9
fhOKi0QiypPwPLT8n005/Oc/m0gkxgPwxRdfmEF/3HvvsXHjRoLXXEPmli1c+fOfm2PJAD18+B+B0xytU89FMpl0tU2QkpJiJqktW7YYVUanHoIzju9cQXft2pW8PGXSe5oeHjzs4dibCKKvv/46AH379nUQPEuBQtau/T9isRhDhpxPLqsoxOaj+9GJpyeeSPSvf6UZ2P+SS/j0ySdFwRaw8jMWDcyr+6cZaJubm41npBFb1vnwe+4hFos5VhRFiHawbJ+VlWUG+bS0NBZjCy5LGGzIkFvMkbOpJwCMvuQSwsDF//oXFwNnHHUUDcYgaYR4HcT9fPTRRlJSAih/iXho1PWR63bYYYft6BJ7+JKYOXOmmcTFKADVVyQ84WT+p6ammu1isZgyRkOvMnlyHQsWKNGdQCDA+vWKKLxhwwZXyur69etpbm52ZQGkpqY6VoOTsSlNlkJ4zjkLgQUMYyX3zZvn0i6QlamT7+HMYpD3oFapEjrp2rUr3bp1A5RBIjyQxx57jMZG5a+74oorvtwF9WAg9z0Wiyl3aAyIqye8ngFMnz4fec7femuEiwwpY9YnkyczXe+vHNUjJEdPRgtncbbU1FTjwZCx68gjT2fJktf55JNPAPj0008pL1d7KS8vZ//9lV9k//33N9ygZDLJ/PnzAXj//ffN/rOysujfX1Ho+/fvz+jR9SgGCmzcmAtAjx49XHy0/fZ7ALV0FK+zH8gxfo0QayhFjbhBff6HH36F0W0GtYSLapWSBoIEWUVQ7629HLqHbWBv9GxcNnSoI0CRx9q1J5t3fZhvpGQCjt80olyP/vPO42XUcFxy6qlmG7mGoqKRxfYRZRhRItR3ePULUd08xPjxP2fhwq1roTyzerUu0hbQrR2IkyMSZTCvsspUdyw/6iiWAA30wRZBCOuzAtgPgLVrr6Vv32uxQ0oY+Ip51R6+IdShso6Gfa29HHDAx1hDN4HqW052UIKVxLlh0iRue+edr3UsD7sfztTU5FM2P0092c36lbq/U6bMBWK8887NPDxuHKeg7JNM7KKrAmVcgBrn/KjRxhnsdXpvlUGqfLejR5/NwoWKj9S9e3fee+89QHGQFi9ezK23nsfMmf+ld+/exGIxmpqa1OcbbuGXPRYbPZmRI0ca43z06J9jw9V5DBwYoKLiMFpaWlzGsAo2h8CYBwL1OqLPIYga1888/HAg2+QTgnoSagkj+qJriJFFLee8/LKpxu1h+9hFjo3vvrHx9NNPM3z4cEpLD2o3JIeMRb9lyxYORXW4dNx0hhrUw7aKbPoQZTKqA4pnQ1JjRdU2D3jr009dq72mpibi8Tjf+96/UWGKOqDRZYWrbYtQQ4PaYzwep6mpySVHrXQ9hE1SgFXbs8bLmwRZwiIjkR5lJO6KS9W6Dels2dLfwUzXEoIALObww8upqHiQp59+mpNPtoaZh68OCZ1kZWXRXcsky/0FpRsgq/vm5mYOuXAqCx5f6KrIuM8++xCJRIADgSozyHfp0oX8/HxA9elwOGz2GYvFDOnNKfusPivWRw87/kX0Z/J3IG/SzI/GjeOOhao9sh9nNoqTOOok7MViMbMSDAQCrpCMZHZJaAWUHLucl+fl+Oo4GBu2BRhALTFqzfcJoIF8xo07lXxsIDWBXUCFgXrySVBPGFvhyenddWL//X+F4kqEgAjjx18FwBtv3LbVtr/85V85//ZbVBdc+ztIgVv/ewu//N71W207fvzv9KsQsIRsGnQ4vBDY2vu6du19jO7bV59jNmqEDyBeuxjKHJHRMw8IEjUq6iD6JFHepFJfmRGsdC1HPewIu8ix8d03Nk45JQr8HZjASpbj9EU488idueiyRQzVrVcxGBhOLTVUsIhSrJ0sFpvqzHmsCikXoNPYkAH49dencdhhUjEwnauuepXbbz/E0VqR6FXWeGtrKy0tLa5iWWlpaYRCKzmsoICVZiJwPvZ+YCBRchy6H6V633V6mwqUzybPCN6UlLyJehjVGWVTS1STr/r06bOjy+xhO3jssccAd9pzly5dzCTrzBRKJpPakIAj/nQ4VG2ktbWVtLQ0M6nn5uaqNOasERCP0NDQAEC3bt2M4dKlSxczuX/xxRe0am6Qz+czE31KSgrJZJKFC1sZPz6VJ56o54wzVqD6h/SVOKq3B3nrrb+RkpJi+oycj+wbcPVX5zlv2rTJVSOjI6SkpJhtgsGgWck+++yzxkV/ySWX7NxF34vRr5/SuNxnn32Yz09QY8ICBvCmZoRZr0QYaCAOFFDPQfydauZSSyHWzKynD5BHAwEaaEYm7EXhVeZeNzc3OwzIU/Q2i1EVrNUC5pBDzgRGMHv2pSxcuNAYotefcgO3zL1ZLYE3YQyNlJQUU5fjkENexhncmEqDTumtpZJa0tPTXW0wXCSUYZRFlARRwriNqAW4i94GUXaPUOnzEG2NelYS09/kcMwxP+ff//7DztyOvR67SK38u29sqE4vgctmrCyX+/Qznn+efWbMcHEhQLp3BLGGa7DWMKiOHEQJgC2rWbkT7Yk42tL+FkhoZPu54wD/qqnh10VFzKWWVcZqB5uCK54P+Seca1CPW5znn/+xY4/HogwQNTBEtTt96NAfkMsqjgUe3oHwjoft44JLzofEavUmaxBvvbp1qeopD0/mqelPqzezm4ElTJq0mIULD9lq20f+/KiezHts9d1XxRNPlNHaWmr4GGlpacaj4qFzQO7X+PF/Q038xagQwBIm69KOMuFWAWoxImZICbWEqaUay0QLoMakHMd7NZaI98lZcI2CE9TwtiEHqCQftQAroIEwteTmXk/Xrl354osvAM19CKtd9uv1CTDJnEdubi5jx56Ju7JLIyUollFQn4vwj6Q9fr+f1NRUwnobqU8VdJzVGgYglejyqTXsjiJ9NdDbx/UVXEkINZekAwHvudhJ7KIK899dY0PU3aD7NrdxCl45XdlfFU5FPbAEIr/f7xJDckJY/R2J7GRnZ+P3+41b2rkC7qjQ0ZdFIBAgMzNzp/d1zz33eGp5XxKPPfbYNlPktlW5sqP74dzGGYJZt26dS39DQhF+v5/cXEWca21tpaGhgbS0NHw+n6nr48xGASsNnZuba7aR34Pqz+KpSCQSJgwpxFPZRjJTUlJSzOtEImH26VR6lO9AeT/kWN27dzfPTFtbm/ECzZw5k/PPP7/D6+mhPYRKnodakZcS1uHVar3FGkYC00BX/qiq+j4DB76CWnRIkrSE1fxYQ2M704dEUDYEsX4DFezIA84cO5bzfvc7929CQNPWu/rZ2LFaXiABJpk/j7DWQioBijUBtSMs0+HvIMqsEAF/hdH6vKtYsnYhALf37Ushdrkn3JQsIJdVNJhlZmSbx/Tgxh7v2Whra+Oqq+aCCZDI6frN++LisYRCy8jMzCQnJ4c3UZ1LtpQgxDDqCVFPAwNoYACLWWPsbKk7sDIU2mZ1xMzMTMeEE9L/3A+rcv9J7ecAH300znzekZSvz+fjxlCIZwsKqCRKjX74KqlllU69tZ4S0c6Tcw+yYMGDpm3p6elQ1BNqJNz
iPPtm8lCP5bQrruC5K67gBM/DsUM88sgjgDIYT7hhhvowodMCCEIQJl9/sKXJxCVsUU3GCRJmqED1kzwmTHiXt99WackpKSkmm6OpqcmIHK1fv96sLrt37+4qHJiRkUF6ejo+n8+EMQ44oJX33lPbz527mfXr1RCblZXlCnWIAZCamuriGYmBHAgEXKnaksqaSCSMIZGent5h+KStrc0YJJs2bTL77NKli3mdTCaNsdGtWzcTlmpsbPT4HB3AirwtBl7CjmjDeYUKBhDVq3pQ/oHh1NVNZPPmzTqTZIL+jSKR96HerPAj+ldZKC6QLKhcY1wYXTa1AmeBhwg2VJGfn288G8lkEhrrgCbXWJefn08QO2pZJkUeC1jDaKCsqgq/w5h3GreqvxVQS5g8GkzTGsy5lyCeiqamJrp06cLP6+pc43hKSgrP9+plCPcNVCIUWaewnodtYxdVmP/uGhsKc7Gs5QiW/CbIc209dcECLpwwwRgSQWwFZEUyAlBciVL9LosvR35ZuvR6Ro06j/argxUrNui2CW9j53BiKMTKggJTEtkPhGmgwSTuStTRSQR0t7igoJdeNIj6KUAjK1bMpKzsWEpRi5VBfggm4C/6YbzEMzp2Dh+KIVGJucehPGgOqFsRX40l5sY5+ug8Zs1qQvUuGRDjTJyo+sXChVtcu7/iipO4++5ndqopK1ZsYOLEl/S7PGDr8Mz2cPjh3fj3vzd+qd94+Obwl7/8xWHUScJqOjaQMI01xAiFHjW/CYfDbNiwgS+++IJJk0Zix0hFVBY5ACHDCzZt2sS4oUMBiHIYa9f+FYBPnqumf/8Iavxd7FT3IUvv7+9nncWBv/yl2dfs2Sv4yU9iRBwHyMzMpBLJDnRO7HnU04cqal1Kp28XFiKByQXAa/X11Nf/j/z8gcTZevS3IW0YOHAK5eX/UnvPy6O+QM0Ci/VvQkAt2YCfXGopAK6fOhWAU/78Z49LtB3skZ6NJ55QKpridj2MBl5nLpazESZfV2JVxkMJBQU3UlPzCxN/W8lIwqjKd5KDbc2CLIQLUaONjSBwm0Nf3ymdK0hNTTUDwJAhd6Im/QhQQ2pqqrakN6EGBpVpMmRII4qkF+LTT3/a4f7FAh8WChHSD4eyvrOxXA0xNsT4gDlzhpnzVR6VJVA1ULcpCEB5+YNEIhFytcRvOhBLWKesh23jkUcecWlozJ37MQApKWlMmlSBMt2aIR6A+EZsZVYQ/vvRRwsNOWDrPzSKlsVCl0DWPfc8C/iIRqOuGjySNijCXCtXfop69GVIzqK1ValETp58I88+ezHg1t8YO1bqP5QChwIDOfzwRubM+djlqRANkLq6OqOzkEgkXFLn4hU56PRJkIA3//YGiUTCZN8AJjyUlpa2ValvUP1VQkVdu3Y12T2eIJgHD99N7HHZKM8++6xJ+2tpaWH5UUdRCsRZxXxCQA752iIFZbMrc6EIv99vSsXPm3c3kyadBShKlR81BetINWrazeNlR451RyJGra2txqXX3NxMWdn9WLNFQhsRxxlsQa18xRsByqYP0K+fkg6vq3ulw3NPTU1lum7PzPx8MMlbeWYf774L8XiL2V7OV7naX9LbhnQbYPjw04AIB2FDRRHHP4BDfD7e8LwbWyElJcXwE3w+n0s+et687zFpUk84uJuy6yq6QVUpbnWXoP5rQy7K2WU1BKSfTZ++BhjAzJmf0b17dzMpf/7554bLkZ6erjlJCaAFjB8saIrzOV9v2rSJgw9eoT8vRT0t0gjV/6dNCwP/AeDtt6cY47V3795UHHUUOQ8+SGpqqjF4AoGA4Xg8dduTylCuryeRSJjnpFu3bmY/ToOnqanJbOPkN2VlZZltHn30Uc4+++zt3Za9AgUFBfToIYRhGUeqUQat+Gn9FBT8Qn8XY9GiawFbv8lKdik/gPhZc/S/BPCbKpVyLZTRKCH69r2fdesuJyUlhY8/3od9913CMC0yCNar4PSfCtLT07fiNsViMVaSywAaUM9CyPFtI7/8+GPS09NN6HA4NmgTAo7Iz+e5qiqO16RY4V9ka1mvKHP1L2LkUmtE55LJpGsEBpks1V5KgPGOcwhdeil4no1tYo/W2bjtqKOMMIvyTkQJ6w4nncdpPBQUzGf16hHm92+99TAAR0+ZQiPqQtWjyGoffPBbF3luZ1BWdgOq+4s7vRD12NoV3f7778f770eQla2CcC2CAHy/sJDZO6hS+NTatSYWOnTocpYuVQPPttTuCgquQj1wESCsH2xb5WUyKrtcoqXCOAFnNNbDl8LwQRj1pEpgdjdYojgYhJqxJp0a4t54ZHU7voMyOiZOfE6/j3P++TnomnsA/PCHuXrncxFNglRg6P77c99995nthFPxz3+eYfrNkUe+juXjF2OVGsL8618hjjpqPMoEHaPb8RLKcV3BMBo0C8DDNwnhCF157rlEGas/HY/Nv6jWnwVx8jEGUE929o2AcD0WoPrMcrI1D0wYZoXYEWnz5s3cUFbGBL3XMCspZyXJ5GWkpqZqj1alq/RDDJsn5wdaHQJc3/veM8B6+vWzJHilhltAjAZyaaDBhBoTDCZKly5dTPp2r369acHmy8hya/Pmzdy3bh1VvXpRpc/aGj8ribDSjLB3jx3LLyrUMSy7zdayytK1UcZgK66gr6xc/3POOQcPbuwxno3nnlMDbkFBAZMmXaw/HQZEGEwto1F0hOE4K6G0t9kjZqW/zz77GAs/ylSiRFi16mlXnQqnoJFoDoC7XkQkEmHo0MccLa1jsC7yFtaadH5sWERZ1RIdDOp/QhpVNvQysjm/Vy/+Wl/vKprklI52ZiHU1BxkrHVnefKMjAxzvuhoZraW7pUpJk9ft0OB4oG6ORUQjluN0TX0wee7mmefncAJJ5yw1b3Z2zBzpiqlHQwGzeq7ra2NSZNEdrkUcqao0pmTIWs/iIsQgDyNkXTz+u9/r6C3LoftxIQJzyLpy7ZXJzj11DhWc6VAb1PBSBoIogbaLVu2sGrVKkCFKs4881IefPAPbNmyBZ/Px+WX/wpFECzW+ynGVpNNEI1Gefrp1zn5jJMgMVlvk4UakoOsZC6lROHCC9nnuedMX0xJSTEhlYyMDGprlbCUZMkAhsAqcEqdyzPZniwt4ZucnBz+9re/AXD66advdc32FihpbXmKJZ1VWMgRAIZRi+TzlDh+O2mSeF+XkM0aouTqffrJpt5MyvetUF6vAqxRWYwyJu7t3ZvL9YIoFFrJrwsKTIBQxMDERzHY1fIC/W17T2nEZJLkofptFvCvUMi11eqESuN1ChsUO76fOBAm1kBBwhobddgRN4Jbfj3iaHMQZWC0D0rLXFIDDMHDtrDHcDZ69uzJlClpqK5ypP5U8TNWsYQCzTkoxQ6/oDrKIhM2SNCv3+N89tlz5OTkGC/A2rWPG6PCaWCA9RT4fD6XRK64oocOfRFrZ+cBxYSppQiV2dGoW/ny1VfzOeJmykdN5RHd2gBOHoWwyePxOFlZWR2m2To/S09PN0ZIVlaWGbDT0tLw+/307v1z1KOrRMCqWWamqt
FoQ2M0yr2RAKqhMm4T4mAEUEjPnj3xYK+9M5tDrRbF4Vps+b81EE9HdYIIDg2takTd9bTTSpk9Ww3cbs+GpCIKYog72A6BpUhRjIA+ZBpQu3o15557pd5GNeavF17IQNRQfxAwn+XgoklLX8zi5JO1PL4fyNJckvgIMGUIq6lhJRVA+gkncOzbbwO4jIguXboYI2rz5s2m727ZssU8P36/33hdGhsbXem0YnA4Rcycab9/+9vf9jqDQ7J1BrDGkSsnvlzF/crVRuexYIyNz2tqTFbFAP4OWNNypfGwFhMljxpWEkaFuC4tLWUyagQAGFMAFMFxFVCPzVS6ft06nu7Vy7RGQhlLsMaGSnEuADL59NPN5pw2bdrESGoNd06MhGvWrXNx11paWngKNQNIgFD6/IayMvapqVEfDIQJH1pavjDkalDjcQBb8DBbGzOhggL8wP8tWkQymWTQuHHE9W8X6/3MAUZlybXy0B57mM5GCMu4BrcOqIXTuioEhrGGlTo1LF+HD3Ztm4L6dR4wnAaghvkU6+PLPPMB6obY5DJZjTjDKaBMAOda5Kujd+9DcRNH86gnnUoWAcpsK85BLXIlbz6uHuh6I/w+EMhiypQ6POrGziCmruNirD0po92HcgH/oz9QvJ7p08UshbffPhSAefN+yKRJv0WZqCrsks0a7TVT3J2oWZP5CaF6UCvqgc3X7vF6TSKu0LlLQXOkBNYsX46arIJYoykBiXC75YrtR3WoySQE/G7iRQAm1drD7oF4gaYBi7WI1jIqgCzWrl3oIty2trayShfsa6qro7W1lY8nTWIyGH3QELDShHkTQIAog4miDD7xBphYvIwVJW5yfDKZ5EQ9cb9eUGCyYk/5xS9MewYOlPpOmcBmV8n4s7Am01S9H6dXt62tjaamJmYCtUa6HCBMjDWkA+du3szGcuiWAwE/lCRkC/WkSc8tAV4vKaEE6K5ruNQAV+nj+nw+uhVBsEYFmuboI63kINc5e3Bjj/BszJo1y+gN7ArIA+kMl8iKzCmlnEgkXIJMAp/Px8aNuz8tcOPGjQQCga2ODerhc4pCOUMtTs/MrsasWbM4+uijd9v+v+u4//77XdV5nRVRdxXkXnaku/JdR/s2i6emZ8+e5hqlpqYaqfbm5mbj8WhqajIr2W7duhnSqcTsBRIa7NGjx14bQw/iDIU2EKaBaX37qrIL7UIPgoxJk1xaoUH9eqw2SitYZGqQfPSRCg2vYixZLDJT+4gPIVDMditRFuiv69p9fuKJM1CmS1fQxxRMRxkFO4r513IaKnAira9jGUtoZD7nAps+/ZSafv2UCJjeIoLyaMhoWYL1fr90wAGAm5fWa1JvyFJtqQRWGt/MdDxsG3uEZyMrK4tJk27ArrqCjm+VSzmI6hxLcDufJf44knrSURZ6Xt4A1q//uEMrNSUlxbgbU1JSzCDnFDlqbW0lFosxadKxKC9Esf5G0lAjhJhPRLdWYoDzUdZfNlGtdVeELQnnxzoQ/UCMUaP+ymef/XIr8RlQE5JTstdpbFypXdfKIh+MeqycLHX7SOeBjT0BVEN1XNa7TpptDVBOVtaJW12zvQnO1VZTU5PJqkhLS2PRIhWcGjs2DPH1UN7TjliKd4n1JFRgU1ODWBmkoDmWuu/NWE9YyCVMD1DJMurJBRLE9F5S9V+ZjOoJAaOJ8n3mG/l8ofAJJKwHqlcITdDv2EbaF0JVq8jWpb6Gg6YQTpy4XH//Kv/735/Mc5OTk+MqeS+fb9iwwfA6mpqazKJin332MaGT9qt157Moxt6f/vQnrrzySvZ0yHVwTuQBVA9pRHE55PrE43FDBn5+6lRKUL0pBzecZMsQtTSQZ+r0wAyW4adQe1HGAxN1t3WGupzF+HqvWAFlZaoSS05OO6XcIGo6SeUvt97K9XfdRSKRMON3GDvWOsfflJQU/H4/NTW/oahoJfYJSLBq1cUMHvwzhgwZw9q1FTyGCiEJz2SEPupo7JMkJFIJFdeSzYkFBVyMMinSEco1WC9zCcnkrli775nYIzwbANm8iR8poS43vxhIGDZ1GDWELyIX6woO00eLz+bh1NH4+hhGg45ayyUWHraaUMR1V4KthNIG5AKrzLZ5HbQnzle9be/36uUiTEnBt4OIasmzBsJY8ygLrDJxBbBEDWQ5QLYOtdg1SAU/nvYgSzvhintX4ZJLbuKhh27dwVYR9S/u4LiE0TNERH8QB6roQ1RPEvm406EFNYjMUr6OxTvN7SKUuBtY6WVZxclwrJQk46jhdwT2SajBDrdLGNsudRDcibp1iAs7T7dJzOhSx1ZLUB2pirMPPpiH/vvfDq+Qh68GSXe9bOFCfjx+POCuhgQx+va9ATiG1auHMnWqykgai7pLQvVVSwc3/8EuQUKGBL969bG0tR3NtCGKGlkFBOPQc/162hyVf8F65PbZZx82oEbBB38ymyeeOJczzugKLKGyshunnbaZRKIry5bl8v7VVzNdtycMjKqpcS0C5bXNfAE1Ouk5oEcAn6+S1at/a9ryuN7XWXrracDQHGhuVL39P6iF2EoXyRbms5gg9Safpwohk9qEztNOG82//12Nh63RqbNRRLyrpKTEDJ551GquMqihsNmsv2KIxV+CtdezqMVPuk5nEjhDDclk0ljQLS0trg7udAvLNk1NTcb16yzlnMD6J7Kww3kQO4WI8bGKcv2uGCsrJmg0e2tpaTErwra2NldlWGedCpODXl/PXK1D4jetkiuhINY9qIdpylwMsWTufW/RPGUKhdjHcJEejsYSZTj2vpxxxhnsDXjM5zM+iAHABRfchJq0S7FF/xK4swMSNkDsR+XNufzKQZTEstPlHQdqzD0++OCjsCnSWSS0USGxZxy7lwknhuIG+XD7ydQ3soVktlQjw+lIfW+Dji2lL8u6NAJUUEs5tTQwGGXqiOHxHwCGsczkRjQCt37ve9zw5ptbhfaccutOsS8hHFoRPHe/b2lpYdOmTYAimjrrEv3xj38E4KqrrsKDBw/fLNroxDobkv2QmppqnMxZQERP7krJP0IpVqszCNRSh5vFH2ANuUCDnnDV8CnGQzKZdLkDnW6/joS8pNBVHEWdtE42K5guLGYh7eVhb0QWMIAGIrxOA/n6F5OxBodq5VtvHUpaWtpWRCxwp8G2tLS43I1Xa5Gn53v0oF6HT2QFEwei5JKrJ64AEE9A3kIoevNNMjMyeBs1DVWboxYBCer09fvBXpaVsgB3rYhsanUYTD6Rv+IPCKt/wusEtiozbKZkoeCFgRh//etlHHzwCXob6fVKbq2BdCJaqIh2W4hHwg+koCS9pM02jiq0vUr9upx8bbqLySCGhhPy+4j+p7yLpbpdAaCCPlqN90jQJcFV/wkB902d6ko5fGrePGNsJJNJo2fTvXt3049jsZhZqWZmZprXX3zxheFLtbW1mf3ss88+nZLj8mXwyCOPUFZWBsCYMaeD1gTqQ1Qvu0AyhaCKtLThiMmZh1rhDxoI+KHwQzU+laMkyBWCQCU1NS+6UuiTySS1Oj12AQ1M1yn58p38l
XuUSCTo9eGH3AG8st/vOOOMz4EvgAoaGqY5ivjlUaG9zmUffkg2yoB0jr/OELK9vzWIb/bDeWtRgUNb+G9ZTQ1FRQeSrknUBcDQhOq7c1Gejwa+j1tFowLwU43kdqm/qu8L0yUElHD44QEefviRvY4ntCN0as+GE8K9kEERwM9KClG50aXIcA0BbW4A1JKP6pgDWUMjdXX/3GVtatZ7PkW/n1gEFEHoqToObmjgirIylpFPmHriwEbUYyHR8TxgRd1SAKLRKEOGiE5DALcD+6thweefm0Hjs88+MymGbW1trgqfTmPmuPHjCQKryMcWtysFaqglQaPDr7Q3wOcrYyzWkxBE9bOo4cAUorxo6bj9RX7YkAdzhqqPGmO45cqHoyLgjajJfwFQyXnnPYg1X7OwKakJIGw+EURQfb4RNcVEUP0sicSbJfQRxFENDgiRzxoTTpPWy7Aq/hQxQMAyf6zvLoj4VZwGShY2pCPTRhhlZABMmvQjFiy4Cw9fDqI1MmbM31EcmQiAVvOJ4FR+hSwGDFiC3L0C9PQsXQl1P6OMRETbpGe1tbVtpfK5YoXy7R1bVsZNDgKzGAOtra2GQ5ZIJMyYsnr1dQwa9BKiWjx27GxUgCIVGESFbq2TJydGRUZGhqtIn+yzrm4KhYVP6pZ93yVJYMeyEpZrY6Mc8MfV3zlAA1NRiztn+K+abK2PpJ4053JVPG+ViNdc7oUHi07N2RCCWEpKCg1k49e8AxnAgqhnR/iN8l0edhVVSxwoYvXqW00pd3DXHElJSTETcfvVUUfZKCkpKfTv35+3a2r4W1GRyUFnGnx2SwgfKtSijlRAlOFaSn0N0MJGBukfrDT7DwaD1NVNBFTeuTDunQ+fsz1OOB9Ev9/v8tJICKawsJBoNGr276ytIb/NyMignkuppwI1NEmEtwA1iIWIAkcccQLz5r26VTv2NPh864Hh1LHSBOUkZGH1JiQsoXgVmO9QnzdGHN/JRABQBP6eOvtZOBQR3MTNPKzRWQ1UGVVDOUINKmn1dfJR9ysAPA80sZH+eqsCbIAkBNQwgKj5VBDCzdWQgdc5gATMljU4gyz19AFgObXGkydXSH5/yqRJ+tUAJky4g3nzriGRSPCjH/yAn6Gmz+hCVQLcWVV2w4YN5vlsampy1YsRgm5mZqbp988999weLj6XQHmVZLKUtbiQf4NY41KZhxI4K6wCIsqrocZIIQIDNPPJJ7/d7pHn1dVt9/uOsRzbZ9wMkyjZ3Ln6PaOvsrNYseKIHWxRaZ6cCG6Oip0tChzb55n/tV+SqMlCcZrM1dCOFedBoVNno0iKobhK/ajHyXlC8pl0Xxm2H6pRyUyBQMBY6T6fz+X2c07izvCEE848b/EG+P1+Q1a6YP16NultNqFc2HLcn8yZw+vTrkI90HnAetQtKQYivP3hh64VhJynM823ra3NDLTOuLfzd87zcsKpdJqZmWkMj9zcXFNEa9OmTeYcy8p8oCvNuImrzvWtevDk3uyp8OWBug7pNGLXNjHzOojVSYnob51ORJnYE45/ebjq4YiwQLwA1UeC2EwRsDoYdUAFhxFlNIotUq23WAK8Th/gOpSPLY7Ke9qINSWc5oIiVCsWiEUEu5pzcqAkF8a5F5VNFQFDjhbVButNKXTsV6ZC22Mi/Otf5/PFF1+oLANstZ6wJj2WLVxoVo8bN26koUGF/bp06WJSYrt37276tPN5cJanP+uss9hTYEOqIdw5SQmsTqb0R/GiqXGwES1uFbJh3rDZs+3d/fufS3W1SifuaEz0+/2O2ipu9VcZH51ja0pKCgP4Ozm6Zat05pRiFdUxgKhrQdU+HOO8r85FkcA59jklDA6j3mUSiAHRoMNB1nCQXhk3VzICRBmGHQvFwAohfT4tTcrXexB0as+GEwOImoQJodT5Hd+HUcPd91asYEQ7F+C3iT6spNYEgMTFJ6vY7wbKyhai1pWyDhUTzklerEQGtNx2OfJ7JDZsRJ1vAVGmahVagRhiMrDLZCs90u94nXC8dxobWXZGN5NDHLcvIAJUkkutKUAf1EeTiaIaUObHBBjYDSLdYEMXlLFRCUA2DXqVloeqIDSAlUQI6+wW3RojXS+BIQm6yJlHkMG6SLckgnKJg8inNzCW16lmmGFUqbb+/oknjEfCrZIKM+fN44pJk0xWBMDj48dz61tv4cEiHo9r76QEtGSijKDuVLX+XIyHHKQvLdfv6rCsDtVD3QEDmXTbL8yU+qetsSOfy3vn5/J7+fw83dLFqGy4Bt2qPtQyELeekRNOY8MpcZ9IJEx7nB4R5z4mO/YTRplcqn8F9SdzUaFLMblrjDdO9fcS1KKwgmzNR8oD1lAJDCceP3er9u7t6LSejTvvvJOJE1VYoX38cHsoKChwdbr2nbg9sQncbHchGck2zjCKM9e/I6vfGTPcZ599TDXObcFJamufs97Ra2ebW1tbXWEgpwdmW0JkTlEleVjl75dFMpnkzjvv5JprrvlKv/ewd6Fnz54mLh+JRIz+g5Sa7wjO/iok0vT0dONVTEtL63Dlm5mZuUfH1OfM+TXTpv0aGyyuIZ9VRmxe/G11RDVfB5bRh5Cuhi3BlQaysYnuYA3IXYvpS5fy/qhRRkjsC8BHi0t4a1cjiL06FShjY41JL1fXqxh39kQQdTXqGYzyGsWA5UbKSwks17OA1wHP2GiPTuvZSE9PN5Pl8OFTGYmyOKuxlVnF2yEusrt1jLd9eMRJIHJmoGxrm44+T01N7TB7xfk6kUgYd6/f7+cdLQ28kPm6ozcCPrJ5kwLg9MJC8oCfLV1Kbm5uh2qUzqwTp8HgbJvzN87QSUfn0/7cu3efgI31C/dA/PvOFXYYCJFNPX69jz0xlPKkKak6HLUeG42tPwLqHkawq0lxVztjwELwFW9GkK08Gz18avcbmvV+6rDFUyRwEWIYUeMjEVWMQtpPCWGgHKokiLgJ+ILD9FAaAMpZxRqTQK5WvPVEqNdu9oNoYDpKDGloD9WE5+LqeVuDGKTiui/W51OJ9eDIuRcDeaxkAYNpIIbic6Smphp+RZcuXUxGSTQa5eijHwYO4t63f+vq32KQBAIB+vbtC6j+LamvzpTv1NRU4zlxfv7AAw9w0UUXsScgGo3S3NzMr888kwHYKy89TGjKzaheGnb9Oo96CrV0vWRClQNzyeZ1tX9HlpoTyWTScMi2bNniyiRyejSckgGC5uZmZo8aZdqVhaKG+rDMkubmZnPvnPtxcun8fr/LsyHtkRo77aGfCADmG20m8V1U8vC//kVzczOh448HVE+uAJaRi3rmg0A5w6g19NlSbHjwrpNP5qSTTurw2Hsr9pBslAQh0EqJechQW6S1AbJQhsj53bszUxsc3zbeKysjC9XF1VRUbyztadhhu2Qbv9/dGG5WlPnYmogSHhCHvTNQpSbKKH5t4u3pEANCtCQENVjtwTCDdbigmgZthoGbdhnEpoh2BDFixHALk69Tu4uxxL4qlPBXPvWMQeWxgOpLfhbxOhGU1kU6sJ5smg1xWQJia0xyuBiTASwDpYEx
wNDx8M8f/YMf/Of/mHA/PAXYla8Q65x0WdlX0PGZfbWts3ZDJgIPHeGvf/0roDyhbW1tRpVHVuUSCPGj7n4Am5ln+5UQR0cDM1DT5mPk8wLHmi0a+FV1tTmuTO6BQMAVrnDyNJxaP05dFEFra6uLZBxA8dpkCdQMvDJ8OMfoCrNgF0MtLS3GCEkkEmbRlUwmTXsCgYDxmDnhDP+5dZxVobqsrCyyjjrKRSJVxtkERA0XFnR4ndF7k/ty3nnnbXX8vRFJOqnOhlPUB4p08qgakHJ1+mUpqlvIGgtseGFbWRtOD4YQkba3vVMGXNJIm5qajJvWadk7ZcOrsbHRSv26BXUhnVN4BKvt4XxI5cGKRCImxt2lS5dt1s3oqGS3U5RM/p7SvXs7sp44Veuwk4eswOOObYodr2O7vQbLtwVZKf3zn5/wgx+MhKyeyskhN+1/RShjo4YBrGQa1mX7gqmwUMzW/A1Zb+rHcUMJ+H2o6xnAGnoJ4yOQ6VelKA4ABlJPI5Us4hj93ZGoxMUgq5jNKqLkk4pa+QutzVJY40Alzz13Lyec8Bsgj48+UllFUn+kFpiQlsa6CZ+R+iv4czLJXXowb2hoYNSoW7BZOMIzkVaCRMhFtRcgRi3Tpt3IG2/cDKgVq4RPVI0URV5paGgwVU1bW1sNEbFbt27Gi5aSkmJCKlJfBdw8kEQiYe6jU+HSgwcPuw878mxsPcN2jG/c2HByEmxCkuLGy2AstirYVdSfevbkivr6HRbGchszlhfi5GO0tra6XIavv67cjRdd1J/bb38RgKOPPtpkj7S2tlJQoFp33tq1RCIRysrOxPo35gEx5rKZPGD22rVm/85jbdy4kVmzZgGq7PZttz3BAw9c7yqA5hxE20/8zhVG+8FWPCl2QotSwXxN+VRpwnbFK6vuZty6lXGXsban4OmnnyYzM5MzzsgASU/243ZUABLQmIbyLMi6vlpfxWVEwBRzT2An50bHPsJ6uSfX2PoBZLXlVO9YZUioQVZRQVwfK4i9K1JEq5U4UVr4OyLvnKC6Wq0cpZ+sXfuyazXqNKzbE/bkN/n5+axf/wCg+l9tbS3HjxlDggbqzbn5TYlzuXRBIM58DjnkNQDeeOMIs3/17MSAGo4++mKeeOI2AM444wFmzfqp6/jSTjHEu3TpYtJjMzMzTTp3PB43xkZaWhr33HMPAJdffjmdETKmyDhTjTJnJUwyHyndHmakNvJqkHRkIUBGeOaZ6znppA+xIb4ajgUuNlt0HHZNSUkxhp+TI9OeN+YMecjri0eNYjrW65JArYBbsVpDAWBzWRlpS5ea0u+yH6eOh3OslL7apUuXDrNRKnGusiWfLAgkyALmTp3KcOwTWYNQnUvBfBM0OWWylwjWR7Ty/PM57NU9XwJgZ7EjzsbOBt2/9TCKDNjZjpSmYtTUKGtDv97y+vx8btWllXv2/A3r1/98l7Xkoove47rrqtm6nFHHWLHiceLxOJmZmZx33nm0tbWZtLydx65xMd/Us6eRt5aBKogajt408ukyRQjNDNRj5sxUcSZN7omYAD0K1ekWY6ksBjEgYcyvQtRVEU9CFfO1IkyeY3vxasgQGMGmxIoxJ0JZSoxIjJhmoJJa6hmBKMsUv3gdAP/1+VTfAn7o9+P3+7n88stJSUkxbl6Jb+8OvLB4MTU1NVw4YwagSIcN5NJAgpDW8pB/DSaQqDQSpk4tQcJR//rXdWzatIlbTBw8n6OPfoBZs/YMvsVXxcMPP0xpqdLTSE1N5cADnwT60Eitg0sjAlUxlrEANXVKCNB6J0866RakOL3KKvsd5wMj9MqjOQRzi4rYb+1alypx+7BIRwR0wKVoDDBq1O0MwC11p+pWKYpoJQkCWC3anueMYvmjy9oVbsMcS9rhbIOzbc7SDlWuX8ewCdiqNWFUmrZ4xMsRgTNhwIAqEWDbH8EtLhlEGYAPP/ww55675xFGX3vtNa688kpaW1s5//zz+dnPfrbd7VvZfjbKd9bYcMYDrc5DnBzsqrwE6NkDaIRwwpKCXgUe7PlLvdX59OyZo38/F3iZXB7hWCRZT3XBH4fddCpgqzCBsu4XcNttQX71q+8BajU8XmsDjBo1yhVWkVCL/JX4o5Mp79TuWLpUqYkuXLjQlfIFVbxz0UUuz0b7tsnDN6N7d0NoGoh9qKqx1SGdUjZiWvQhqgXQnN/IQx/DPmo5QMCl/7Gn4JRTXgF+BOcVqsWNcD+d1lmBD0JjgGnM5lmKUfwJGTTR71+nHFu5RK68U7XC6e2Qz9UEUanDhAVYNc4gUK9qFvOf/9zKoEHK8+Jc/YEafGW1J0W7tjVQb8u7ty2vlfPzlpYW+vRRmQ79+/dnju67VVVVnHji40AODUADIVZSTj4NSKLuIYdIqrUQbBsJBAJ06dLFXMMI9dQSJh6Pk5WV1aGSJGDIovX19S6PR26u0lPIzMw0mjKdHQcemIeUV1f1mOT5zEI9l0WIx8IartLfgvpvIzAbmMMp+hdi/8oouyswatTPgEaE/lyDKlevHqxZQJJaehNgmXl+KIARPx/Jyt+t2NZudwp9+04GhuHUmpY6WqAqvM7VGsD1RrirCLuyiCALLPHIgHtJsKcvt1pbW7n00kv5z3/+Q58+fRgzZgzHHHOMMXw7QqfNRnEWWVJQk5+M/fIJcUgmrBBRBVDP97FS24oUpDLNnwdeZjRuv0QMOCwvj2Xks25duTmuc0LfvHmzrksQwkl7S09P57333gPgk08+Ydo0ddzs7Gyzn22l36alpRlVzzlz5phMlvT0dNe5H0aUgboNtvKh21A5MS+PN8l11bv1o4b3BUA9I4EA86lmALXG0JIJLQ+oda22nUNPBFvRVq23W1paXPUTOjN8PlG2nAEHj4BbIKs3xD/DzodCxxgNvKwcw2sI8nse5CxM6TVArr+E/YJYsa4I7jwS6bXi7QgAxdSjBINqdP6ImCWQYN6877HvvvuaPThFkJyftX8v/SkWi5kJOhaLGaLd1cceSx5w5ZNPkpqaarwhOTk55OWpM+rWrds29RAKCxVPv0+fPsydm8/kyctQvSvEW2/9iilTTsI6pGejBnNJL2w2bb74zTcBOHvzZo488hecdNITPPnkqS7jQc5l8+bNhuPR1tZmwgzOzK6UlBSz2v3rX//a6ch8PXr0wOfzMW5cVyxFUXnVJHOkgRqsym86bo1lCZxK/6sGFjBS19iZAxToDP0waqw4r29f5ldUuAiZHWW7OdP1nWNlU1MTl/I6lVgxsXoOQ3lgClGLPuU2jLDMNaYTpsOQsPO47blosr31xoxGPUsyfsWxQUk/UMBKslD6NKMd1yqiX4uOScSlniNLgpA+ryxgks9njPo9Ce+++y4DBw5kwAA1Fp188sm89NJL2zU2Oq3OhhOLF9/LmDHnAAHiNJhBvRqIxS3FUdKXpHCY/jWqe8ylDy9wKOqxdK7bLXaGO69+edNNVfzqVwO/6il9KRSy45SiCEq8KYi7SqeK3Q5DJTWWANWsYSFrWAzASOop0ceoYhVRszpycq9rUI+a08bfkxD
Uf0vhUCjorUmfTp6sLGWKgWk+qDgUQnnU4+c+/sxkrAGr7BJ5F8Ot9Ojk5Qex0fcQ1nvUbH4Z0v9qGcC8eTfvkrOd+MYEqITXT/+P+UxMo5+ceqpppSoSWIolB6sWrVv3/A6PMXfuSCZPVpPAlCkqbJivJ0eVlxXEmclyzNSpvPTGG+b3px95JKowup9TT32IXN7kfODIvUzoKzs7W0+gj4PxE6jJzgZXF+kk42IscVf6l0wOQewIqYy+StR9lt4ZQWqHHER2draLQC8E3fbGrUz0Tk90amoqd6JMipeARWSDq1xmG1KBxBlspBKaq+wiKiUlxZUS61ykdcQpys7O1pkpR+rrUGF3bFyTwt0YiPJBih84gVqQ1mA9GzUuHWV5HsNYD0fluHH8zNFv9xTU1dWZdHNQi4hFixZt9zed1rNx3XXX8dprilDm7HC7G04ikrwH1fFFJlmQmppKRkaG8TY0Nzfzz3+qQm/7778/w4YNA7bt2Vi2bBnvv6+KrzmFiDZv3my2kfow0gZpT2pqqosw9U1j06ZNXHfddd/4cfdmFBcXb/WZU/StfT8TUp9TXtpJ8PP7/caz9mXwxhtvUFZWZqoyO0nVyWSSoUOH6i0/+tL73lHl1oaGBqPXEQwGTWZKLBYz44Tf73eFXbalXNq5MFP/DQI5RBlMjg63qYjfSlbq2iNKMTYXIY1ayLozh0ZdKVaxjxSEAPl1p4wRU0aCH0oSyrzIJUqD8Qks10faRD5rGYPDRKpSxslLurLtEuCd1au/1LH794+grogz+bUcWTEsWfIvunbtypAh9+Mu61mJDTgHgQgDWKPLrimIh1FxZfzk07ALymXuOWgjjShf38vzrXg2anR9EzVYqrhblGwimom/EOuYTqC6iZJlTmAlXZTPI5/5HIuyY0V3QJjIEaznoLDwdD777EnAHR/OyMiga9eurF49j0GDzgUa6d27N/vss4+Z9F+fMYPBf/gDAOvWrWPwYBUPjMViNDU10dzc7GJtr1u3zqwYunfvzplnSvnwUh56SLXulxdcQDFqlXBpRoarTopMLmcXFBBA2epB7DrdugCV61C5DPNwSv6EqDePXBZSzVTWthG9pziWfhsD/NTUbFv5sTPhxRdfxFUmvg5C70BI9Loj2IUOqM5jloIjIH4sURK8whzyXeXfEyiaWoXjPVi/k7CPgvq9+DCUPLmgBljDAGCy6Wc+n88VZmufUSKrTxHFys7OpqTkRtSwfiYcDARg6tRi1q5V3rmllZWsW7eOSZOu0HsNk0stF/7sHAdHQnEGli9fzgcffGA4GwcddBD5+fmAcmWL8btu3UB69ZoLVJCv9UgA4jToJ1jWtTGiTOWQQ850XL9h+lpFkGq3MZRh4yzQKMZJdna2SwvCKbQnxlZnzJ4STs5Y7btYCYhcvLMCcx5K5NCaFnLtqvV7IUjmASWEdSg1gO2ZMVRvrKVyq6wQed3esHRmMK0eMQKAT7GJ3nVIj1eeu7vvPoi7715Eamo3Rq9aZXSSRLBuJrCMYbpFBQwa9AM+/PA5171ubW01BmRaWprR2VBhHxm3FqBGTRimTZ1nl7xJMBhk4MCJqDB7GPtgL0D5dSJI9uNo1Jgq1+ecUIipBdZLKd7xjgitnR2FhYWsXbvWvK+trTWh0m3DR0fxgi+Lb702irrlKubmzOgPYe31WsNYEEIjqG5cwXCUvTsQNQVUYhnLEWTqbcYdU+8Y8+bdxkcfqVXbyyedZByVWcAL115LBXCHzgTYGVx7bQMqA0H2VMoFFySAOcybN88M5NuCRGyD+r1cnwIkMCQPlVMfQZ2x3/FOcQXyUNc5jDXBYnpLcfUDCM9hT4A8ICFFJxDfchB1UxuxY7bYYiY4WYTwg+r14JbLKvpQTy1zsVk9eVgOB9iQnQQAl+t/5a6gSwTYukLll0dd3e/UmfrVHe9ZKu1xP9rz5t3NpEkz6KPDazvCxRevYsWKg7b5fUPDxcDFfPDBB5yvq76qBYGYxnlYIRNRbUW3rYb//e9qDj74KJP8vrfB7/eTmprqCPtmIU92VD+fS1DubaXFIjwgp78C7NKjAEgQZQBh1hDAdm37dFu+i/MvbF1Kobm5mbOHD2cM9ilyjstxbEXg3/xmBE1NTSYl/8x//5sNhx9OHDWGzQWWkY8NbeQBAfbb72w+/PDRDlWeU1JSXO1bsaILZWXXkM18hjuu44tVarRPT09nJPUsM7yRoN5iMX309YjRQIluRR52uXB2QQFz162jtFcvQxwNs2caG2PGjKGyspJPPvmEwsJCnn76aYe68raQwq6gzn4rxoaEFZT1XAPk6KQ6BSEWSVjdMjGcsfFmsomaHG8ZwGuQNCy1p3yi9KGWWocWQmtrq0uiXMIl/fr1o1+/fkwsKuJYbHc1nQ948LzzGF+huulrr71Gc3MzDQ0NpKSkGEt8woQJ7Lff37DVB51cCDUI9+vXz9UGpx6IWPch3EXOZbiOmH3JGqMa9egsp49ehQf17xUruwTLaM9z7EGGDete3VNqTzQ3N/PEEyoV84wzElCzHub2tCKq4uGo1j8Q+ZEaILEeNYjL4CgG3CqtiRllGeV6R87cErAlnypRPrrlKHnkqOlPdnKto6bmQvPOmZYINv1vy5YtLFy4kMbGRpqbm5kzZw6gQnTSd7OysujWrRuvPg5HHpmkb99KvZcKamp+AcAnn7zD7/v3xw+03X47qTpc5pxoEokE1dXV/PSnGTzzzDPceutfAKipWebywIgnrrS0lFnaJT5o0J+AYt56q5ApU5zX0MmkCgPltLW18b///YsLDj6YcuDZE67m7befAbYuJyDHcuowxONxQ2R2E849ePCwLaSlpXHvvfdy+OGH09rayrnnnusIj24LqeysZvB2j/219/Al8fzzzxsm7G0HHshYIKzVC2SN57Qna8wnQSz7H9SQP4AK1pCD9WLEUIRKtYVaZylKlY1hOwezRCJhjIREIsHQoafTB2vwgM3qENfkpaWlTPv1r83vxL39wgsvAPCLXzRi62K0z0IuBQooKkpl9WplSqWnp7tExgSLP/qIMUOGABAlm2x9DkJiUiXBxY9TBVQ5xLGFQFqMpWql66ssOStixkQQk23AgAE8//zzzND6Cp0VLS0tnHGGuNf7AwlbGV7mPqe+WQKIK+UL9aYUctL1AlIVWK8nmxKz7gRLvxSTF9z5UxVAFQO0N0H6k/VsNOL3+11hAGf6p+Ctt96irq7OhBPkb1NTkxG5isfjnHaa7Nl6uKCCqUVFzADOWLGCq6uqeGSgJkDfpoS2/Dfc4JKGdhoegqOKinh0iboO/fr1c7VXMko+++yXzJs3jylT3tbXRLwbpeDXPTMhTn3121UMAAp44omLTFgkIyPDtKG5udkIfCUSCROeTE9PNxyPzMxMHnhAiZLtjnopU6ZM2eX7jEajLF36PplIz9mCIr2nglaKVXHyFP3dZpRWcROwBnTFUuiCDOPjxhUCPYGevPLOYqy2Y4rebzPH65ohgo5qQYnx+RHKFpfeGQdaNK9B0QbjwBIee6yS5uZmPv
vsM3w+H5deeil+fVTxKadSTyv/0HtK0+eS5IwzznClZnfk5ZD29eUdmpCQE5SMGcPpp59utmkEelFLA8/SbEbCVj7X3/n1+fxNt17KaTYAxx9/PFF9xepJBVKYNG4cEyZPZlfjrW+ZDH3kkUdy5JFHfolf7CiMsnPG/rcaRnHSeHJwn454tOsANWGWogYpmR3UanINEGMNVVjfhzP9U1zGO++qzTPHd4Z1ahxHrsQm4HaMBUAFfbTRU+vy1TcCkyFnKLiKb3eMKPlkU88wbZAJxIEfZQFqkIqTS1QXhgMxahSc4SfJFJDXAnUnDjzwlzz33AU7bFfngAwUOospHoOaZtxpwNLrJPAUAoJqcgwCjeLeBihlPhVY7QOw8RdnmquErPIQGtr0Bx+k4sILzS/sMXcNrrxyI+ppqUANqREA8qmlFHWn/1tWxverqjinqorficEBhG++mcHbEPa57jo1eQeDQUaPPo0lS/6+E61x8lj8UJTuuFwB+HA4oAjUc+Y8yLRpyzrYx56JTz/9FBDV0Ey2sBl1d7ozfrwy2hYulInWp/+lo6b8TUAzmWyhTZu8zbTo36fwzjvVAIwbV8y4cWN45x0JFCRxKtluS7xL4CxDHwOaEWJ9F6Crbk8L0MaAAV1IT083xqpUqm4aMIBua9bQBWV0+IHNKK5RQv96C2kdplw72yHt9Pl89NG6Rx21u6WlhR4oUywdiOjnsVmfPfrzLCBr/HiaFi6kSO9Puubo8eNZvHCh3iqNBDFzv5wG9t6HFLbv2WjczncW37ixIaXi/zJGuagjqCGyDrd6dBxhBw/ETpxB3CmGADHq8VOv6aD5Dne1rPPEcOiI3Z9IJMwKTrljg3qv7lLGK8nHTg7FtLW1GW0AschlVTqS+cbIiQNBrRy5kuUYBcDG4ea4Tlly54PepUsX+miiZynWPyHGj/LcKOZ0BFl/Cz+kBBtYCgMRR/5+H3MelnAmZNG4kVHujBDv0mmnjYasYvVhHlAj65ptJRtLDCUMFEAiC2ryUAahmJniKwtiw1GhdvspRA1fwxHh/R/95QISiQT7anntCiOvnXCFBsAtSS/9IBQKGT0Cn89ndCcyMzMJBAKcdtocrPR/BQNYY/pKDragYSXw6sCB/Ortt7n6ww+5YL/91P6BF25/kBtuuMzlzXCudiORCDfffBqjR89HDLNw+FKXlzA9PZ1DDjmEDRsO4a233qJHjx5MmZKnGiActARAug6zlOvWTcDvryesBfhOOv1EXn1pNqA8ABIuCQQCRvtAdDhAhZl2p8DXrlyJ3qa9SYceeihjxpyEumfjgVtZGB7DZ/PWmevZq9dfsfyDGLCQAbyCc639EptoMIn/akx87rnLXROys7I1uEs4yP12lkAQ8u1lI0Y4KquC9S5bRtjFFx/LsGHDWLZsGQ899BBdu3blzDMVIfjaH/8Y335ADayOW0mbCtTyaA4tPP7444Yb4WyncFqkbYL2pRrkWm3cuJHc/ffXYghwWLlKJAgGgy5tlo7g7MO9eg3HVuaq4IILTgXYyzP0OilBND09nfT0dLPWV/r/Sqyl1ghDN6KGyUJsISvJhBYjQ0IrMjEqxl89fuJ6Uo3hJEmONQ+g03XtLE+flpbGihXXM73sWVcmy0qyUZNHUH8SJDMzk379+lFbW2tSZTMzM1n84x8zA2VFV2O1oxTELCjngw+GutyH8jo1NdW0LSMjg5eXLuUPo0a5NAPDwN91Rs+NRUUUYQ0jC3X0PtTqXAOre7lYZ0UspFbX3WjUaWy5gL9Tl5j/wQ98mLN1uoJcqSfSt8CSNiqwrPVCrFpjBDtMChE0D6viKORceRgj+vsJQIKZM1XIY9OmTeYen/roo1SdfTb5ROnbdzSffPLOVueRTCb57LPPzPvs7GzTz8TYiEajnHbabVgiZjOicSBTsRjaNSiDvpZsXp94NHPmPMt8RuqtCoEsbr75I37xixLXs+GctFSYY7raPivAmjXv0r9//w5d35MnT9bckvHMvf9/TD7vYHt5qMPSFpWnLTs7m/W6FAGJtznyyLeZPfsgevToYSYLZzq6M/urubm502SkOK/nANaQBTq1tQKqFPVRrmM4fCHJZJKePR9C+FkDwUWQrAAWEcMGeuvo2/dM1q593HC/nMJZ7SXKBc7PhUOmRtYAVo8FbJiukj/84SeAUpeVbJbNmzfbcS1H/7wABoUg6LDzqzu4Hm1tbS5DyLkAc5Ja5XVLS4vh6zQ1NbFuyRJSgKO7djWZTU6Sp9PYcC4O3eq1AZz6vjtK2d47sGsIor7kdq7m7o1VtpAAWukB5OKMVar4ZArKFkpiY5UtWA6EjVWOHVtgHqZ3OoxVZjJ+/JAO2+NM+QLV8T5YsoRMtherVK7N0tJcmpqaqKurIyUlhV69epGxZo0rVtkAtLKP3pONVY4ZY9uzvUqrzc3NrH/vPZqwjvrBY8e6XIzhRYuI62M5Y5VdaDXVaHOAfdg6Vjl87FiWLlqED3GXpuAnsVtild8E5s4NA9mQkQkiv9AENEkfUv2BDHTtZEmrrAPq8dNCwjhc01D3WnpCUv++C6ofbkbd5WZUXwOT+poRZFCRNRbay4nXrVqlK6qkAtmMHTuY9mh2VGVNS0sz/Uxcuq2trVRW1gHdsdkxn5NFwji+W1E9rpk03eY0IJX99+9rjMolSxodW0Lfvtt2rwN8uq4/dIVRxVEyMzO3WVl548aNfPhhHiNGfMHy5c7+nUBNnptRNyKbsrJUc76rVm0CYpSV9SUlJcVMKCkpKa4JSCYIyZ4A6NWrV4dt3x5GjBjBH//4xy/9u6+CW265BYDp06dz9Zgx+FEep1q+D0xm/fqzthvmuKlnT2ZgTdungD8zAGUEBoDl5PI6w4Gn6+u3+n17r66TqyPXubm5mfVDh3I38AIjsaXZQfyq2Szi5rvuAqy34f7778fn83HZZZeZz8+59lpDjRKfWTXKrH8KmPfBB6YfOnWX0tPTXYZCR9cimUy6DE7Zj9NDsq3K385rIUq8ZxcU8DqD9bUMAjXcfHN/AK6//voO97E3wOfLBQ7b5vcHHFDFEs3n2h6+A6mvXx87qgTbEToKqThd1zsDpwvcmVGys8d3PhDbEnDqKP2qra3NNeh+HXyZNnv4cnDKiTvll7e1rfNeJJNJo6fh9/vp0qULqampKh6uiZRfxwPV3Ny8zfbsKKYv2LRpk8vd7eyLPp+P7GxVUExNCNs+d0H7vij7dWZtOZ83ubYdVUHuDPjL++9z6P774weyeYUoFfTsOZNweP42f/Or9etVhVydNj8GGMkalrEEHS90pct/VTwPWos4iJv8XAD4ie6k2vDd+m8Q62h07m3o0B8AUFn5Cvv372+2Gai3v0u8XbsAj/XsyVl6fw/17Gn8leIjD5iWlqK8fQXAul12/M6Lb8CzsTuwePFiDjzwZJSFPBCYwerVJ+D3++nf/zG9lcRyA0gcOp9VLt28WsaiCIBFrF17jNn/6L59qTepr8ORtM+6urPNxO0sNtbW1mZiws3NzSQSCW4YNcqoeoByQVdjOyW6H
T/+/e+JxWI88sgjpKSkcNVVV3HIFVcQRz2oSsjm+1id/jCwgFyWMXfFCjNZZGZmumKV8jqRSJgB98xevUzeyQvl5SYDANSg/7f99mMBcNfy5YCKb0tcu33M1ul6lOtwyKBBNKKyXsYS5Z1O6j70+f4ETIb9RthISR0qWSexEUiHnIAdQ0PL9Ua3MYBnjRBR1HB0gthhUgJSpSheRiVqWF5oMoWiTAV+xN13f0wkEnGV85b+t88++/DYJZeYsIakJeeyzBCPx915p1HRDAaDhEIh7r77btra2rjyyivVNuPGMXbsz1CS9Vmo52YOfVhlWhwHVhnuUzF2TZzH3LknAjB58if6XKrNdzfeGHQplDpx222/hvE+yIOlNy4zImBOQTpR+MzJWcRzzzUarsXUqRFsYnYl8kS/9trRhnfxf/93JfPmPWWum2SjRCIR0wanoZ+Wlmb6cWepkaLGwftQnrEKslnlrg2l/76nvRPbWpC0tbVxfq9evEI+oiw6WIdOn1+nJsr2hq7c140bN/Lb/fcHLA/sJa0zNGTINGztY6FQSvgwAVTyySfX0b//JD75ZB6g6myAKmIpbdu4cSMAfx492oyCyv+ivBsrdZZdDm7hQpHBqQOWOgwOp4ewPeT69O79b+xM8SojeZPJ2N5fg+p5Eb2FsF0WA8s4HrgCKUefTBZ3eKy9CT5fHvCDbX5/wAHvfZc9G80od7PEGRVCIcXWF3daz54/ACoZRj2lekvJ36g18d4SnPA7/rflmF3B+x3i5qVLmT1qlKH+xR1HEmb/tvi3xbhlx5ShIW7ISmCJeXS/DB745BNT0K0jnP7hhxy3adM2v98eBg26CBXKUtkZka+0l+8KKoAgVI2wXUuiIo2Kp05jRGeZOHOO0Jk8wtoP04d6EtSaAmpWGTNH/1ZplOQ60qoB7r774x228tLHH+fXZ57JAKLEWGmyqLbH+e4YkrcFoqMSZ5VLs8ZWCg1iNSAtk+iDD8YwdOh9YDJtJrCttfFttx0PlHPfmcIzGbuD9j2FqmkhCGCfpALUlDMXsJWPD3Iore6pUAb/aNRo4dc9SL12CqA5U1OdcPJkZmqjolevacgdF0PDuS0oA0CIuKNGTQdTHbUEGM+QIR+i+vUM0DLpMp6uXXuey1OXkpJCTc3bFBUdR03Ni67iarJN9+5KkfjmtWvN501NTZR89hknAEdMnAjYvLCgbk0M4RgNdnE5dpS90qvX+fpdRP8tp9Cx3xCqlzvzAIXuqhZzek7K6caC2R/h5qvsrdhRNsrO4Rs3NpqamsillgYzgFcyaNDvWbDgbGN9Z2RkkJ2dzccfP0YgEODpXr0owl1CzLr0SgFbRCgdyNfZH/WEgAJCoXNcblcnR6Ktra3D4j+F2OE4gn0YirDmTfxHP+Jfv/qVWTlcfv0VJFEduRJoYLBun5PSlcejmind0XGd7ZT2gfJ+iDcjHo+7aqlIbZeuXbua/ThXmW1tbUSjUd7fd1+qACn7FeUO1IB3FsqujwDpzJx3Lp0RPt8QzKSaqIZIsfoigGbXyjATw1aKlLscRHnK8lB3ezHQQB5Q7+oJEWzWymKyeZOgYy+/+910s5pzstyd3jQJj1z5l78QCATIzMzkiZNOwo81G1Zcc42hse4DtN51l+lnRUXKXFXx9gpsrpJKt22gD0HXhC1E1oh+r4byyZOvAuDjj58nn0UUA3VEqSXOjTeWANOgR0/HLqqBCm66aQm5uYpzFAwGO1SklPNubPwjOTmHYdOQj1FtzUqHeAJYwltv3eqqNHzbvHlmX86wUVtbm/H2BAIBV/VSuaYePHjY1eik2Siff/65rjop3oZKoIpLJzxokjargN/PU665tLQ0vrdiBQCj9tmH8iLxC6QTCn0fcMdys1AOY4By5lPuqGTpjPc6C7FJOMNZ/2HYkiW8NHq03g9E6UMztZRgszpi2EHV7/fT3KjTC1HrNTUJOCc0ZbLIpCHHbV+ITdA+pi4FpzpKYwN3DH/Lli188cUXlJVdDFTyfeoZjtSZkSyEgcBkyPJB/BhEv1vc1p0Ng1nFKnJR171dTDkRQ03MC1CTrrDOnT4AlaqqZtZCagkRpx4brxTJUeWhOkgXdAoDYzXxr6mpyUyIV1/dyF13qdeNjY0ujoFc48bGRrp27crh99xjeBkAfz3nHKNaGwRKr75aUTtLS02fq6qq4j//eYZDD/0lNluqACgmrI2NImAYUWpYRJRhQJB33vk9R44bZ4zm7Oxsk9OlPHZ5GOeyy0US4Ve/epeiov6MHTvW/HZbSElJYWB2NtAHTKZZSF3DuORrxcw1cRIU5RxfO+QQDnhGKYvm5+eba+vz+cwz4+Rr3HfffVx88cXbbNN3BarN4h3z49bRl7ExQK9eNyAeq08/VYsAn8/XYabJunVzOlSgTUtLM9eoqamJd955h0su+QC1yMjRW5egGCA5SIXUtWuvdfHYWlpaOuSorV37Mn37XgFUMW7cQJc4mLRNJAJAjWtCcq5Yu5a2tjaKioaRR9TFDPj5X/7CJZfcRK9e06mvt8U7nYRh57n26nUx6rkvp49ecBbqKyphomqEkJtrpABkFFCZefoeBDpnzZ3dg06qIKogfGoVt81mDenYBMNF5DNp0v1AIwsX/sL1yxt0DPMGOu4MYaznoXzd1yP3LNB/oxwPFLGMuZSwjBJ0GWLHtqkVFVQgUXNYabQsFmITU8PYuPiux375+a6HtRHIBqOwauOUEgTSK/S8nlDQDSLd4MMs3ebOBxWxbqDB5YpGz2eSCF2J6ncSLnFCcvUSqCtVQAPFWGNDDMdyTqOeY/XW9Q8/TF2dmkzvuv56GhiG1Ky4+mo1ad9445cLnp33yCOcc871PP74bzjzzL8xn4X0IkpexdZJzqpPDUSZFmqyimpxNz/1FOtrU8NKY/Y+NmsW7x+tQhdX9+ypC4PLgJuF8YQ0itGmPDk33fQpjz7af6fOITt7P1QPjGMrFs3FSrkKmWbKNvdRDRywU0frXFDer5ew3iYxfvOwI5iEgJXHql+/L4AllJePNPvJzMw0Bp/Tm+nkNjgzd8LhMI2NjUABDzzQw2SjXH55COXNCxAKjSSR+D5tbW1bKdo6M0Zk/0rRtgBId4U8nOR2p9HiDAn5/X5CoRCLFs3hnXfeMb9NT0+noaGB2267ikMPPdRlODm9hW5y8mhU36pycT8iGBOXWrPQDdBgyhCs0ev2IEYBOILxUHropJ4NFS88C3Xr5wKVRMmmmqiD2DlB/4swfvxR/JEGRv3vf3zxxRdmZdO9e3dXGEIegqU6PRAUwdLpanXC6aaVDuv3+42rOyUlhX9p0svo0f/m7benACeQkZHB559/TuXhh6sp68YbSUElDcoi8I4lS2hqamLixHP1OapvsokyX4dQnCx+Z3ucaC+r7qyIKJDUrt69BwEjadAm2wAaTDWUAFaKpxDFXgdYxmNAGGqmQ80E8PuAShPT7WxIgNYK0UoTToeSUSmJAyGX9Lv8TWCh9lOEFfGS31YwknpmoGiZDz3wAA319bTq1LhpqEk9hhrgGhgL
DOTGG2tcBoez7okQI1NSUsjMzARUeOKpp+7glFPeBIJkE8WH6merVtmqoFlZWTzzzN306tWLyZPfxK3HLmoB4u+A0//3P5qbm+l29NGm8ksVNlSoQpxiZldjV9kil7SAs89+HiHJNjTcv9VKNjd3f320QmxtoIjej5BCI4ixO2XKSTz//J9dMulHHHErb711K3OAU7VHr2vXrq7nVp7VWCxGNKruZ2fwagjG8qZZtNQyGOvdkLDrGOy6W3g3CYYPv5Hy8hu/1rFvu8093vzpTz7GjfsqbDIYNOiXqPu5mXffXU2/fifw6afPfa32fTUUoka7ciKOGlERpE7UANwcPrWIqCeiS1yoBQLEIe4ZGhadtBCbGkyFl7wEUX2pN65EUJa9rDqH8xhvUn7wwTQDl7z9NqAGZiEftYczA8BpTTtDFTJQCWse3OIxTot+0aJpRkxITey96f3++/x7//2NGoMfNS1lvv8+Pp+P9PR03n//OTZv3szj2uUcgQ6JTk4XY3p6eochFee5tE8xtFa9pGzBGpaTT62pjCIBArATbIBlVLKMel4CJkBCBQUyM8s6vK7fdcxLJvH5yjChkEZhsSewpOQscokaMmb7DADZuoYGrcMpphqIoJFcx3vvvhuamjjguuuMl2sC6i5omSYWEUY4FTfeGAHgppu6mXvoNBzBamtEIhEuueQvQJhh1DMZ5THzAyU/0WJKwBMoEyDKQaiBVIV6sh2cjRhqqDj9f/8zGR2HAGs//FCd+377GfpoHhBmGRUs05wjOdsEUM0A6mkGagmra5n7PcRrociugA7X2COLxwhETBrCZLPK1MObMeNGZs++A1DhwrfeuhWAv736qitzzFkfRgyMLVu2bDNz5ruKzz77jN++9RYPTZlCHKg1wnAFGH7L6J6O4oD667haeV84XBkkf1y40IwTXbt2dRl+0recIY+KigpXuFVef/7557z77ruAklLvKOV+y5Yt5vrHYjEmTXoCZSpVkE09caCVVKCaEf36mQITC4AGpvLJJw+bfcn4umnTJnPcK69czgMPjNuqbRUVFfTt29eci9N74ySjqmdc9RVnUngcqDfZOgXY4Ir40kNEjYGuFiOAS1Rv74bSlfq6+JbCKBGUC1Wi0sNRKyGJH8rApOKZNVh628uavVwEzPmaYZKvizxU69P0v2496JBH7xyurygr427NQdm1KEUNUrKcL6CexQRYZmz5IHb4l18MBxKspJKVVKCKjV13RtRV4KhzQekAWI6AE+k4s+nl/jkh/Ux9Lv4JuaYq5NcI1N5zDyST9LriCsqxDBHJs8hz/MpW6ZGjVbEjvPTznzMVe49KgHdQsmLy2PuxOV1RTUvO1vL27QM/kXb7DwMfabnyBG6DVP6FWEU1youiBuuAo/aOjYQP0F6iGFDPSOyKXJ5cZ56BXIN0ouTjp954lKZPfxaAN988c4fXp7MjGo2ydu1aKhCfm1yFIPg1KbcUW0kghC4UqPgUcn//Nn48V1UqU9cZLnEqcDqFC9euXWvCLk7Bq5aWFrN9enq6y2jZd1+R6paelMCO0XUM0H2uAmiilWb8NDCM+YTIpUEnAwTo3/8aAD7++PeucIkc9+ab+7uMBzF41q5da9rjDJ07jQ2fz0dl5VRKSm4EKl260soAzsPS+4tx14gKo8YL8SJVAxFjzHropJ6NbXkjvg6cK31nuCQtLc3lDXC6acV13dbW5tLml87sJDdlZma6sle2J00ilrdsL25xJ+ThlzYkEgnXQ+MkfznPpaOaAF9X1MvDV0dxcTFbdrzZVmjvcXNmIgkJeHfg888/32769HcFzn7/xRdfGG/P5s2bXa/lue3SpUunldhfRq524YsRFoOENpQXKC+lorjUYQsgxE3CdgIYW1LCokong2zXorxc8eZ+PHw46di6UfX0AeKimcvHaFFe8kAH6Rr0gvLtt680VXp3Jyorb2Tduh9y56RJgLp0iqdRCozQf52p62B1RLJQy4Ya/a/nbm9v50An5WwceeT3sOW35QGRVblYmhEwlDUdl0U6t/IT1JgSywrCUE5NTXWRhpxu6o618HFlpjgn8fYEJ/krg6F0WR/QBqx8cwUpuEMwKSkp/J/2ZFxXVkYYa/h0VJvA6SZ0Hr/9SkXO124Txy1AFUDq4Q7XK8+w64q6VUhG6+/ijkJ2nRPF2LolcqYx/b4O8NNALol2NFJxwMqWoCqm1tNo+B0SDDj9nnvYb7/9+PO0aWZ7Ga5K9Gtxiluvhs2Quf76LG6+WfWJ9vVHsnX11Sv0PgLYe5Sq99BP96cBaWkc4TA2jx0yhBLdTjkv8c1UApcff7zJplqB9a8sx3p0hI4Y08eqR7JNhutzkDCI2msf7YAGeT4nY3VlJFAVwfp+YvpvDlCqywiqz/73v3PMuciqctOmTQQCAU45ZT5Kr0PLnhWl8/I9swAVdpFn6cEHH+RCXV33u4xrr72Wf/zjH0AWURdbKIGimQNVQmQGEf9S38VYpT1MqzSfIzs72xUuSUtLM2Nfc3MzGzaoIgWtra3mWiWTSTZpbZ7s7GwOPfRQ0wqnwSc8uT9WVPBuaSnVupX1NHLPPb8hEAiQlpbGh7/5DclkkssuO5fLL5+HJb0GmDjxOT755Mqt9p9MJs1xX3nlFS64U+tkfFjNXXe9aNosPLIePXoYw9I5Fre1tRmPzebNm/nJwoXm87a2NiZNeh9lABVgvRc2oLy1pyPCtdf+Fg/QaXU2FJzu6SCWgR3U30f0NjHHZ6AGut23ghk06JfGit8Z9F2yhE8//RTf9deTshMr0jrUoH7a8OHGzb5SD+YrVizo8DendO9uuPv/66DWgcVy7IQAlhqaRUQ72cXYcHYbCToUguF3fDWa2HcFRVhWuWTdSBZKI1JFM0SDIc2CNTKcoQllgLingpN05VaAFwyJNEAFSmK6FDslx0x7ivReZT0a5oYbmrj5ZnefOflXv2KQEtpk4wZlDMT0vhKo6iUpo0Zt88zzULwhZ8AmgZqiYsAysqklizcJOLZGX68wDYSwBdLkc+FXTUOtDHNQ13MJ0Eytg2xrXdTFWLXV5XpbOXcxAiVo40etInc0mC3Q26v+/c+7ltHZqy2oSVPIGBLik8AXYKZ1WUDUoK6nkDgUcqmltFcvKhxhZWfFXsBF+hbvUFNTkynqd9FFrzFjxgyArRZosrDp0qWLKcOm7mYBl1/+W/7wh6u34sEN4FnW0Acr++13GUJOY0YWUccddxxX32Gvz9VXbwDu4+abr2JhWRkD33iDHj16bFPoTNC9e3dTiK2pqYnS0pUoQ7VIp/k3o56IiP6FkG+zEGP65ZfPab/bvRid1LNhPRbOSHIETOVCwK/dhwk1KDWQrTX2bAeZu3Rph5UMU1NTTRrih6NGEUElmL2uC6n10ZOMPKphoIGDUINqAcOHn8ZIVvJoebmLqOl8LQ9KRkYGffr0ISMjw1WNEOjwt3FUdy5GDfPKF6EG6uvKFCmzGkmbDepzVaqW2dRysq6H8HR9vWt1kpKSQn39a+TnX4/tFHHEVyGJh5Wg46dqsKm
glmqsiVKsj9o5HdIKyeTP8flexD15yRUQf0OO6YHOdaOTuxDEqmrIWjwdjIDatGkXogo2TQCCNOhJcDYPUoi6wuIhsUqlesXKAqCc9PRzXLLbgwZiO+YG+0vxMrSNHElqu34m8Pl8zPzoI/4xZIhJdUWfX7G5GlGWkYfyPkzGmpUyicnEJ3lMhVhjYzL4B+mw9iCI5yEmWtSVvqmvcU4xBIuhRgwaSaeWZziG+0pncfDBz+htinnmGaXDUFxcrMnZymCcN09YUXlmsmlubjYToqeN4MHDrkYn82z84Q9/0K++hxp0KxhAPWHqibIEW8gCQA9qjX5E0KYeyKaeNxYvBqB3794ucpPEuuvr6+mpV3+DcCaeFgEF1JKglhD5mviWh4or3nVXEVdfXQXUKJplOEx+vpDhrPWdlpbmMm7S09O34k20zzCQ3/55zhwenDbNEP7q7rqL/YCFV19t1n0rycW6+yJICfMo6Sad6778fC7R9QKamprMuX/22U3U1qrBOBaLMWXKTN56S7manVLCkyYpAaoGinmTSsJaa7UYI/bNBn2/rr32WjofxIMhM3cE61fKM1tJmKIjxFHG2XzDYgcIc6YhjQ1E3afJOHN95rOAIlYSQMziGpR+QTpS2grmcBANrP3Zz+h7++026ygOSd0RqlD3IoLNpREjw5ml5Az73T54MKOx6zQwqgHGTxhmDbVmK0EdS5aczujRR+mjFWG9E0G7WSKmZN5zfMrwSIzB5t3IFdXepGJ9iegGNaXYoE0FYgRboqEk5opXLo+TTqpDPb3letti3n77EjpCeno68bgy7TpT6qsaR8TAkzsF1ryVBVlEf1dBLstMyQQQBQ6F9iE5HJ9LWKq1tdV4NgoLCzn3XCWe+PHHvzcZPZFIxFzP1tZWjp+gwmIFukW1jAQO1UeuJB6Pk5KSYvhs6enp+k7WagUldU5FRUofJJcGh4iWeMBGo54nOfeQeV17ww0cAGw+5BB87ZICOsrQS01NNXWh9t33nyjV2gKd2g/0SIcNpbiXnOIJtlw+D4JO69koBxYwmJUMR3Wp+SxmxYqb2bJFSQ5v2TLfwWE4Djhup/fes6yMnrJgy4PRS+Rh9GNDNWHDgc9jDTItPPDA/lx0kX+3ruynP/IIJeecQx5q+tugDY35RgTsUNSD58eWImoE4tRoYyO01V47xltvPdDh54sX/xqAMWMuAwKsJJsaokQ1Wa0IyPvRj4DOamzEsNlO4Cz4JZ61PNTQIvlPMmBbvr94mI7FiixVO45Rgg1CpWMn5RIWspI8pFdVoybNPISjNED7QcYAo3/2M165/Xb107C7xa+ifCCNwP+WLeOf50vdh45RoM9pn/feI3yAksISxoR4WYJArTEQlKk1d+5BACxZ8i9uHD2axTRQb0It0tuWoJ7dADRKvo0sBmTQFrMoaC9oBNy1WMLk672vMWnB41FhGs318AcgsV63Lw93KGHPgQptyDWMY8NJcq5h3H2u0oTpgvoTCXvKVNC+Pol85jRUhaR/8MEHA2qhEo1GaW5u5sADq7F8OjGEjgfQ8gRjgDOhqCfUJIHZ+P0rSCQShm+Wk5PDK+RiFV78QB19tGJnENUtokZGX463BGtwgup7WSygwWj+jnYYFU6OnZMD5y4rPxr1fJZDIgKJPGAEFPggJNe5EKfW6EMP9ey0WkO7B50sG8Xe/CcZzOsci+qG5UA1tZSVnUllpUp9c7pCU1JSjKcgMzPTJdIlrlOnNsUWsONjxDlMNWJdxFnm0wRrgARXX12D6nDDGfHkBZSUlJgYZFNT0zbJpS0tLeYhk8997R4I8TyUlJSwfv16Pn3ySRpPPZUNV1/NAmT1fKj+xXjdDhnERT642SjeVbDK1oJJTzfXIS0tjeLiYnVeiYSpN+GUGQa7Mv744yc5dN99tXLkWKCUKM06CXbH6ZnfVSSTl+jqr9X6E+EGKQMuX1eZlNojYH0X1SiH/xrDYC/BiizlcemlC0DX3LHCVM4QiTpmBDU4DqBWJ8vKESy9sllvI/eyOWGNgmrgWXKBUwC/SznRKTwnz8ZtgwZRBIxftYpNmzaxXO9Hpvk61B1VygLCC5AH5UDKtdhcKULGrsadHFyNFZcKo9wW4qGQbeS5yrKCrY3VKI+OalG2Vi9R/6IsogLV50tgdEAuM4R6Qvl0/SYGBJk4MclrrylCo4QuYccZYt9lPPTQrVxwwS/JpxY/a3Rg03rScnUyvYwETtMO7Hq8gC+Pgd/bF3rsC8CICUB8Peo+C59O/Cbi/8sDpsHBPdVjscAHHxah6MYWp5zSF7gT1etCiBElDKoEaAHH4brlwhWqxp2kHUYthg5iJUGgjtt3cE4FBa+guEWy4hyOMmZmo/phEOJHQlyK4IF4z+0z/b8dHGVvQwqdSmdD3HKH8TqHorpADNW9gkAtIUpKlLbD+vWzOoxLA67QiTMXXCbcCmC5fhoTjZYdYof4AHb4lWFSBsmBPP74ObS0tOD3+42AjTNPHexg3752iXwubQK3UZSVlWVef/SHP/Dra/+MdVmLRS9sf2mT05WtHtdqVrkMjI7SKNPT0ztMB3ykRw/jqIyjggAVwCLqsOt8uOWWn2z1286FCJYgGsS66qupRylGOKfSIPbsq4A1xPXvI459ylAvg3EEdfUSiLHRh2WG796M5nkQpYpVujaJUhV9hQqyiJKO7dPVuCmtcv/Ly4/vMHtKmPZ3lpVRABxfXs6WLVtYvHgxr+htY1iW1CqXAWVdxvF43Ih9DZo5k7rzz9eZOMWOKyTskSDGoCCOpdOCXf3UwMKg3n6xvjbKeBWWhuwlRpSV1KhtnTfDD4S7QagE6+b2c8QRNn/o9dfVmJKamtrpRL0A1qxZo1/FTSCukHoiuq5HEHuVnfdRrjrY7LIRdXVbhXOdhSeDwSAAN9zQnXfe0STjU7CWSyUwuyckQqh7VoPN0JCNCtS/JSNUR61SbQ8Gg2zatImUlBQ1/oyfCDUTIfQB8DzKY2E9BdbQmIZ6JiuxmYliblfqNhSgvBMjkHCds1ClwOfzUVDwa73PUsjSBlI8pvezgAG8SRFQw7OsMUUy0fufjErR3cArr6zZav97NzqZZ0NwMZatvxgbRe9DPbXagu7Z8xAgxGAaeFunbO0snA7zamzan7pY5cBc+lDPCL1NM+gIu3KlnXnmAh5/PJdvBjfoFsjEBbaCSQFWj1KsbrUSrTXpiDuPH/foYWrkyiooDsbEyKOWaq0LWf8V9v/dhDwgOajBSkzcYtbwKrDMrNkckTdOv+ce8i6/nAUsokHLninI1mGyWUOUcsQAEWErcRrLwl6mxijZ2Em5FCjiWeYQp4GBP/85PX/zG1787W/J/+lPAalrs4p/Lzhsu2f417IyRgPTVq1yVU494Fe/AuCmm+6hDw08NW8ekyZdgXKDi+O9Y1fx6bNn0zh9OnOZrz1e6sr8+c/HcumlFfqsnIEZ2Y+sUMX9HwAxJBwyaRI4CCJmdDVQAZVD7S5lsRnKQfVSSZ1VvpmXX84Gttaw6Uz4zW9+A8Bttz1Mo+4/6djnM4
A6a+lL6rmUPq2u/2Ci3KyLmTlTQZ3FHVNSUkzFaAgwblwfOLjQrTbgR2t5LGCwNpjRiclyxBBrmE9CkYPLh6N66XJ++MNW4GDgScjIhv3kB0EgRB8WMQLbSxYBYlrZvihmqDPcGcL6bvx88MFZLg+yc7GnPgtjQn1xeaJDCE+oCGWKlAA1rOI/mrf3OuX6XBqJRlPNffEg6GScjV/+8pcAvKhrSOws2rtHnaEKCak465vsCjQ2NrpkzDdv3uzav7Rpy5YtrowYZ1udkrpO2WBVBOnri5s5tUTa63E4j/9VIfdrb8Q3IT7UHq2trRQVFblc5KD0D9ra2mhubjZ9Xwh+TiJyc3MzH3/8MaC0KdqTlNu/d+Kjjz4y/ebzzz/f5vk7jZlvG926dTPXI5FIdPKiWcWs0ZTifBqMsRFBOQ+UR0wYOcrjWV19FfDln/Mnn0zn1FMTltcrHW4OqLy92YbyLGwkQTVQwSJdO0f8LItRE3WNet/UCi/HUCbSAmAxk1Hm9RKzJ/HaykJLAkFBrIS4eHTFm1gNZom4LdQg8unKY4JuRyUQN8Z/MW696kLW8Cq/o54K4LIdHGNvRCfLRhGMAQr9UJewfGN5jBo1AVKtpkpYRTXD8vJYqlNZnQNma2srn3/+OaeUlbGSPuRTy5HADZ98wn39+wOqG1cA9QwG/OSzksnYQsrgrG/ajDC/L798NtOnT3cx/cV151R4dBORLFJSUoxb10li2rx5M/defjk/vOsuzj5bkmCr+eSTQ10iNykpKfTrN0ZflWLcrmsAv2tyEHTv3t2lRiltvqiw0PDds7B0R4nAB1CDQRg1uC03ugmdF8nkr/D5rtbvioHxUDRIL5hUCGENjZogbOuQAuzbowfjUX3zTV0DxEINkiraHDVp1HX62xrg0YULGT/+AuzaVPhCQb1VELnqi5lPGDihSxdisZjpj2HgnQ8+ANwVPJ24YdQoxgMTly+noqLChEKcIZff/OZH5OfnU1FRwW23nch116VhlTjUVHL11QtIJi2Z+Pe//z2T77iDgh//mFf1OvSK3/2Oa6/dgs0YqMMSmJV7O18n1q6kHFt/OYEtyKZqqtRSRy61DsqnJgluqFZvK4uxDFu5KwnmzGklI0OlxDrDrJs2beoUQl7bwh/+cCLXXrsIRVxXIxZIuGECkpavEOT994eYRYuTEwa4QqfOUKtkZ6hFThhq8tRzkBAjbS4wm5FEjR81jptcHDF7DmPDHOVY//EGFGvuZlT/qiabZYYXZYntzlCe8EKkSo6EUxbThzXUkosE27OyslxjsbMP9Op1P6DCmLCsA+9vHivJ5T80CIXZYLhuwdHzfsq7777L4YcfjgcnOplnQxAE1a82WCGlIFIiS8HW4/Trmgwd43xtaEAhCWrN78/Sg3Rs6FDtflSuucWffsrD/fpxQnW1GbwPbWvj8o0bGTXqv9jy483sLMaMuRqoYtSogTvcFtRjevXVC1EOvUrUgzRmq+0+/XQx/frdi30QY7S3Li/u25dXGACcCRQQCn1/m8cVowJwcTYi+rU4NYM7dRadA8nkXQBKdyNrkBpV4mgeQClQTlgbGxLIOvBPf+KJo492pK76cYcM1JXMQ3mhi1F3UIyNVZqXoTRAE1i13AjuVFyV+hnHOo5vd0yY24uQnnHggYDN0/gyuOuuNK6+WszM7WPwH/5Atc5G+slPXkb1MwlrlCPPSx89Ocr0EaeBNWbZHES5yoVkGwMqaKDKEJ6tqqOeaKoKIJyuy9vLNnnAt1sLaXfhmmuuYdiw1zn88L8CAYc+Sx7q2knwMwEUsGnT58aLOnHicuBmDtL34Bm9MPP7/a4Ck06SvfJgzIWEBPxAlHhkjBDKs9P5AdBAH6z4mN/xWtgjjSiehpqcpLdUAou2yrgLYr01kpUyB4CprGQCsIAG3qSStWtf3upcfD6fgyd3JBCmlgoGEDVhTTnDWkWf5nUaeZ1KclllGBuS3ROPx7nmmmvw0B6d1LPx5qxZfP/qowlusNQjWSPZhywCxHS562xD1OzatatZtdXU1LDIuBdDRoZZCVypB68UGdZ1Xnc0ygnaEHFC7T8HS88rZODAX7Fq1Q2AClmINd3U1OTq7OoILaxZutR87sxe8fv95rdbtmzRlTQjqIcqAUTo3/9xampeN+2xBYDGQ49i/QwHzfe5NLB582a9Mowgq4yCgr8SCr0A+jrIdcvBMj/ScepYSqUFWw8xD/jBrFlbXaPOjGTyOHxjsJmc/oCucDtcT4qQo5OhJ3fpwmz9O6UDkMDqYxQiE6hMgVlIlVjBcMaPb0AGP7taC+NmE1nORAyleRABGvSKLJuoqw+Jd+Pj997jzWVKqj+RSBAKhaiurqaurs6sdtPS0oyH67rrpIiWxVVXobN1IvqTMD5fL5JJNZn/SKc933HHHQ5Whx+Ja4sKSL5O8RWIp7LWTEiyf6exAXZFG8bG69vtqRFkZPj3v9OAGPF4k1nN+v1+E9ap366yrgcPHr4WfOwSS+EbNzY++eQTiCjfgazZK1ADVRQhMYUYQJQs4KV5r7q4GZL/PH78r8GQ98oZiB3O+h9yCAAfosasGGpo++XQoVz+xhv069fPDFobNmxg4sRmLNs5DMR4++1LzOqhfbllZ2ru3Ll/5sorryQ1NbXD7f1+v/lc1SHIQ4nzqJS2Bi15XVR0KQBVVX9k6NCfoiz/sxxjtl0DF+h9jUclxJbzd8OnO7tAuVsrgLdr1BQYxBobCaxRJ0lpMg3G9Latn3zCHocgdolWBFSNwVlOehlzeOQRJWSmwniy8g+jXMzgXNFVoiLSEvpeZVZtQSDCBx80MnRoCAkzjKTWIVtXa0h/EeCy22+npqZGEyaVodkMzCgr49nyclMB0+fzsa/WzwAlHz148GAA+vXrxwfakF6zZs0OOTfJ5JXb/R7gxz/+MT/5iSQbBrHk5RqghgLUc1Wtt1DhyhJsyqxc8HSsz0zJuysIATDP8RuwCqMh5s3rbTgqLS0tfPGF0uJJTU01dT06cwhFsHr1aqRHSIg3anJQnCG4dCZMHu/4ZTVQqoibZFFYeBJQQH39PcbgdFZ0VXVOZmKp+UKkVD4AMZ7LgVcYhvLAxh3HKkSFdoqwukV5WP/FFvpob6FI61UhI5f4EgrUb7N6ahs2iR2JlHdruP5XDUC8w3NxFrNU+54GRFhjlHLAGrTFut0lQIwGQsxnAQDzmc0999zE6tWrOeyw7ROy90rsGsfGt1NcYMFLb+OfONEIIstkl+sQfZGMZycKCq7CqhUGsXG+uJkGsouKDBFpAWqaUASrMDnU89dDDuGmKqshceCBc1E5YBEsYWn3aUzcc89JANx6+eV6KA6iDJ3JAAwcuArlrk4Ay6GmEltnscL8AqzG46GO/S/Xfxc6Pjt53jyampoITpvmuqZC04pgE4OnvP0277333lc+v+8qkv8B3wRsYg8BqBqPDYDEHVvLRmrCs/0hYraIY9Vp611pdHHgJYYOfQlhxx+kRbyEKyNVUsqx6hzyufOZ/nO589tvCwnHXzGeqhhJlGIsdU9Br
pvwi9Kx2jZ6UhvYDeJjdJbJEtQ1DWLlxwRB1BXbtKtP6DuJyy67jMsuu4y+Pp+jD0i4S4yxCBBWSq6uJUMAdX3leodpbW01i55EImG4HNnZ2cyaNZMnjj6aSqKOcpYDgXSzpFHm9RXADKzfbrE+1mTIKdSRwFLUSLsEFeZqI0GL8aLKiBo254N5Z+zOap+W508gHq6FrCKOPB8lxsBobm42lbRbWlpIJBIMHVqH1c0txc4qYJ9vobwKa82Zlxfgsss8Yug2sWsoG9+8sXH55Zfz6quvGsmgAmxndLqmY6gunpeXR1HRXUj8O5+VgOrnUf2LbKJm8FYZ1QqzgSjHo3p1iGUsIcwapgwcqCV3Ac5HWdMqLfa3vx3HEz99hLy8PJd+hTOlTB5cWXGK4JKq4WD1DwTCD8nLyyMej/PTn/6XmTNncv75/0I9IAOxK4xi7OM+F6vkl4U89FnAy6NGmXWFPDLV2BBJlD4mrPOPSZM4FFtiSx55GaKa9fUMoKSKL7/8cvZEJBdog0Pg76YHS4Aqh8cqD2sa2MDTAK2uiv5E6VaMAZeAdDm5vMBo4DdLlrB582bemTTJ9axKX//pO+/w3nvvEQqJdyWbwdqzkYUlejqJcCkpKabIFFgvm9/vZ/RoVcdkV2Zm3X//HQD88Ie/Q12XOH20oVHabttXqMR6KIJYoyOIMaPEFpk7CDZYz5KbJxUEhitZdOabSTMtLc2EBjdu3MjJJ5+8i87yu4PH33iDQw45Rb8TE1WedDH2nEyfav1XpvcIbtXRjhFA7khEfxIH/MbDGeUw4EgY3Q1C3dQmIc2t8BdaV2nNUCgv1m/eBnzUM4CEluEK6r3ngC4HCWrRVAPxnrZrbMgBk5wPi4iwiDBQQFXVH7d7Lh98UMjQoY9hJfDNigK7bA2gfC0V+q/VFHrjjT2vH+1S7BqZjW/Hs/HBBx8QwubZCz1IGBPSZaIMZsiQG5BBDqyBVYoqtBxG5Z8vJmpElCTC3sD3Ua41YQZOo9YEbSRGHEAZGq/ym99M4YADDuCAOXNIJBJmkGtpaXExoJ2Mb3FvO9FesdO5wjjggAOYM+cA3n33Xe655xAuv/wLx1k7EQEWMFV7e8ThLFAsDfXIBPVnwgkXFBYeTjYrGc/W63awj6MMU8nf/Y4PPviAI488kj0WEaxMRMJOcHfckefIdsrCroQsSTHMKgZiS4dZvoGQ9wASFKEYG36/n9TUVKYtW0Zo5Ejjkztk4ULeeecdFi5cSCQSIS0tjZtu2gzM4Nnyq7ZqsvS5lJQUl4KtM+MomUyaiXjo0KFf5wq5cNFFFwHKCM3OzubSS+8z7nHJp7IBp3qtCCrkQXkihRUEhAepC1gMbBiOeiMrdcfTvZ+PeQ/Od6WUt7W1mdDJnmhoALzzzju88cZTABxySBXK6xnEXetHgnBgA6B5tPd1y3VLTU01fTstLY2cnBzO+d//uODgg2nvbVAGdABjSlpnA4SKgZB6buLpNgKWE4DGQpT0eReglAYS+FllzM0AkNCh43oxmsJjrNMm1g02jMAl/6wXV2lpaab97QUWBbn8QheZnIwKQRfrb+qwuWbinQsDAd54Y///b+/c46Kq8///HHFQQXLkJ2BiShR5TdRQy9S02s0uoubdrazUzUveMq9lautqmpatqWXu/qpNxTLzmq65XtJUEkEJ8YKxUEoihoPApA4w3z8+53POGQQB5SL6eT4e82AuZz7nM8OZc96f9+X11r/zLl26XDWmQsNCqXTnrLAezfXmzcM5caJ+Kj+BD+6iuzLOZsM4qT/FaW0bB9sIQR6aoaRiY5ue9S9XpWFAe6jXUizhZcKz8yBGFrY3kMCiRd1MiZnlx6JFdzBqVDSGySC/A3HCtiIz/N1bFB1BlMb5aqttG+7rHcNkM7pagOHNAONCYQU6rV3LqVNlFz66WXDFg6Ul2peZQMEhM7P72on8VjNpRTwxBCHXgzLJUXqfALL58Icf9JG8WrXCERPDndHRBJh0Ia7Gj7VrH7yRj1YuLF48HG9vb/z9/Vn31FMEYlyLxN9sDP/ZKYTBEYRuSBwLBIe3+HrrWOC89G2a8hLqeLJn2d7y+Dg3HVOmTGH58uXao+7a32zc1XA9TY/BqAqRv2abWxfqqlWr6sec1WrVNX5OEGwaQ2ZsSS+KOC9yxA+8PE3baJlGRxrCEZu4r9etZGKI8PuRSgp+ZBbigfd0F6UNRPNuSMTv6ujRPVitVn3+5vb0VapU0Rdyu3/6ifvv741YXPbWvGJAxr0Y+SBJGEHjDH7+WRhnBSVRK0xUZs/GhAkTAPhs4sSK2H2BZGdn43K5CA4O1h8X1PnP3DXR3OnQYrHoz+fXRZDj5OXl6eNHRkaakpsqngsXLuj/l9sVs17BjSBjynI15uXlRWpqKjFaFUlaWpoe6jAnG9erZxjb8tiS8zKv8M3vMR9b5s7Epc0kTdlUMnPmzFLfhxnpsbHb7XrVicPh4OWXXy7T/d5MbNz4I926dUFcJNMwSonNSwYZlLMilyQnT64t1vhr1syjd+/F2qNkjGTUWhhl2w5wSCMgAyNJ2IqspjMCtOcBGcITOU/JxOBAmCCpehL1vYAfOF2QYhFvz7iCqNATpa++7HSrXyoO0dHCI9R6ocnTbNfCQFEubWzxOTdufIyzZ2/NUupSp6icjWK2JaowzwbAIJeLSRaL5vqXDjeJOdmsHkZsPAmAdJYSyRbt9TAMF2ISxlrLBoQIY/ch7eEpYGUbOGZc6OfM+S8ul8joHxwczCenTukua3Dvmpgfs2iXJH8LcDmO+QLyxhsO5swRSo3z5jVh4kSZ1mrDcJv6kUyqLuIrvRaZekJiPdKxk84pjEQouQoRmRiiwkd+r07AgV3LC0jAkEd+/zY6iQvv1knEyedbAFyux7l06RIvvTQNEQSReRtmP3IIp3FyWg9qeWE0NJPHk4PuDzzAnzCcafZGjbBNnKgbIRaLxc1gEIJL8dxxx9P6Ci4vL08P1xVmvAJuqzyJNHrLkunTp4Mmif6mNjfxOw7C6OuZgvgGZC4BQBIk1xPbWC1IT8iOHee4fFlUMaSnp9O588Ay/ww3K0O07r6fffYZX375LX372jBCvzIoIXFgGCN2wDCazWEUabxZrVZd4Ev0S5EGrlz9g3tVkDxLgCHgloyPFt6VJkcmwcAfiDCK9ApCJk49t85oe+it7WsfZDgwegxtobkmIhcKzNKq4szyAVar1e3473jffaTShWPHlujPR4+N0bdve6iNFo2xQJTMJ/Lm999/179nRREUVY1SzBzuCjU2AMK+/JJ5fadhdHcE9F4lMkDQFGq1EeG4NC0evX8Q7q2JpJphmmn0JMAOft6GaKIDcT48Jn5w8+bNR573n3/+wDUkxEqb9cBf9Edz5zYGYNKkJIwTdggnsOPDaTIJwDgxhODmmtaNMrMbMhkj00N+cBEXF1ofkK6FCd75sjK2kb9+XFfAYtkAfEt9dmrPPq799UQXUqpVGzLkilIiQyvyuRR8taZuYAT85DptryY6dy0f3tSpy6nMLdRnaRe12gsW
EBAgfkHPP7/HtIW2kgWMWLwdnMKPvmtXMwpwIt72DBo0iI0bN7JhwyXCw2shvscQESLIkJ4NL2Tfp19/nayHFcyLJXPPlNzcXN3o9fPz49//7grA88/vpz7pOEnXcipkSMYI1vuQSBhimeONIZgVD2zTPS8yIdichi59G/L8fhApIS6NpABO8DiyJk8cLXKeubm5BXb4DalbV0tkHUOT/o3hBTj+9Al3Y1tmwtrkd+XNhg2t6datW7H+B5WF9evXM23aNL0T9MKFC+nQoUOh24eHh5OYmEhcXFzRg1fWapT89OnTB5erjyYyJFPovDB03RB/pQmtn/e9GD7cg6VLZRwxGzhFczKRcr9xpAFBkKRpCqQZm7pcYjU2Z06OaZXZlFQakZube1VFifmHKz0Xcpv8CUvmH4d5BWt+HEwci6dM4ZW//Q2Xy6XHDWVkyWL5AWkoGFnc5gtSEkYmh4x32zCOCpHGN3t2d6ZO3YcwxLIBbz79VOzrwoULjB07ltsRl+t1/p9lgr7Wkl1+lyx5nREj6qAfc7qrGMTBk0yA3snCOFKD5DgYpp34rz0FNGTevN1Mn/6Qtm+X28kTYNWqsW7P5+TkuG3jdDrdqqPMfyUyfCJly8ub8eMNo/W5557T71ssKzGWRqcQVVb7+Oc/J/LyyyPKc4oKxS3JY489Rnh4OBaLhdjYWPr27cvx48cL3Hbt2rUl6/9UlGejmG2JKtzYkJhFhiyWwxjq/GmAF660/O9oBixhyRL5ngfx1UrypM0fQirfsBTXmnwCRqbw85QpU9i4cSPh4ZeQKy55EpeuuMLCIi6XS8+ONj9vdnnLbcC9HfxD2t5CQ0MLtLJdroeBh4GrxZcslta4XNFXPV8YKv+pYOZ8/DH/0qot3hk3jjf+8Q/tFS3p87wN98bvyTQnkfYIg0LmzWtBAf2dMoJtdM8VybrvaXkOr8+cmc8ItXPPPffoRi64V5oUlDsE7r158vLyqFq1Kk82asTcr74q6VdRprhc+UMigypkHpURo516WwgNlHIYcEY7xxxpBhmi1qNq1ar6cSWPB3A3bs1VdjabzZQn5KWJoUMbMvEiUz/7pulbCJ9qe4x+VjK/8wiZpANOPDA0U+QvwWyCAxwhgFRdJ0hWNT2OCIgDZCYn6+dd85xdLhfj/P0BEWrZK2ftJXbVeGcj8XOVk5bO7yQAB2vX3lNgi/rKjtl4yM7OLrRBX1ZWFu+99x7Lli2jb9++xRvcg8qbIFoULldL06OgYr7nwA3udTeQRk/KR/q4PZgUJUtGSQwNxbUZ8sknALw2dKjp2fUIU7AhhpolQDZBGF0dZBGdHaPaZx+yfwRoikXI1usFeSJXzpqlBbVKhy0nThAbG1uKIyoqEqkKS52eEI6hAmBOw4oPgowr3HnnTxhNCLYTbGrmHkl90tJi3Co7atSoQWCgOAt99dV0+vR5FT9S9W4sdoQ4oPQ3g5HFZMOQLbBhqFg4ceHugrZhmBTyuSQgFT8MaS2pR3pOW4371qjhVkFjNqql6fodsFeWtUrJFpnCIiedBpyXCqXxHD0q8oJ69uxZyDdeefnmm2+YMmUK586dY/PmzQVuM23aNMaPH6/n7RSLUgqjFJz1eJuRlZXFypUPc+jQJOZoB3v+lvJyFSn7Tnh4eFyVNCqflyJfZm9H/jEfPHSIlitX6roBivJHduYFcdJbNno0LpeLRmxDSDqvwf1U63fVSk/m7G/Cl034ks6fMWSRQfYRgTQ9uv7Z9On68WEF1h07ho+Pj368mKtOzDezR03eZChPxuhdLleFhVEUZUgIxpVZ5oiab3qOm0ha9iGSMNBzLOpzGj+/GUXsJIg4fPRUUVl7kkkH7dZFb+hgxVCgSdHuC/9bDkbCtDQnQnEXKAshlWDOYLStMGfsFRfhb9RMFSkmnYCo9T92TtzO/4CQd9zNgQP3lHAPlYuePXty/Phx1q1bx7Rp0656/fDhw/z8888lN7RkGKWwWzG5KT0b5c2AAUKxb8OGDbog0uXLl90y/c2GRf4wSf7nzW5LMy6XS3dtnj59Wt+vomIYNWoUS5cuBWDwsmXE//WvnBwzhjFLlvDZiBEmgSrpg2pKJHbgNPdiNGCLxAejM08gMgnSV+sRka71/JFOZCdG9UgKuIVLzKE4eTzl/2vOB5LbV61alfR0USGgsuxvHWSPm/vvX0+Pi93du1eC6E9w/gxGwqUIXWTiQ5pWdWZHXpgNTwGI8ISoSBGNAD/9dDgvvjiHfZpYYjZwglbAk9rOMjjBEbK1ShR5/GsFspqx4UD492wYMvUyw0ma6Q7AjxjSsJGpy7o137ePYG0+5tCJmX0BAbr5/x0AvaGJv7BhpLGRfA5DRzoZyGbPnk6cPXu2yJ5BlYnFixfzieaZ/fbbb/WQWKdOnUhMTOT8+fPUqVNH337//v1ERUURFBRETk4O586do3PnzuzatevaO7pVEkQVipuFYD1nA1766CMih72POFGahdYgkgRNShmMEtkg00h2zE2tZLlduv6LNfRcM/WQy43xSrNmzNu3r+gNFZWSnTt3svvZ2jwS30lcULdrL2R8i7iwSn+DONYglJ3aZTmAdK2aLQ1///lAAufOfYyHh4eeDxQYGMilS5fYsWMpjz46gHhStfe00W4gjuuDHGGnLmy3G1FbIroVO7ByhWBOaI01pV6HDO3IRHeZ6dGUnaQxbcdyfQ5yPvkl9/Py8hgVEEAIRsjyNH8GuguRVW9t+GNXtO9Ddoc6w+7dfyUvL4+dO3fSvXt3bhVGjhzJyJEjATh16pTugY+Ojuby5cu6eJtk+PDhDB8+HICkpCSeeeaZog0NqNyN2G5W4uLiqF1b9AEwCyzlr0wpysuRP6FPejOqVaum98GIi4sjPDy8lD+BoqRIie8aNWro/z+n01mymOZ1YD6GzMJc8r5ZmKswjRez9wPgzJkzBW6nuIXYjggTZOzSnliPEeozaxOBrKKykk59UjnNQeAIwSQCHxe6ixMnvqdXo0akkEq6nuAJwqh+im9IIV43KLqArgQUixMHTswJmNLYSMCXE4AI9Ajh8iBExmvB+Ps31j+LLycI0z6pYVK3FK9nI+Ixp0CYPtuRreR8iAP+Wug+bhW+/vprPv/8c6xWKzVq1GD16tX6uaFly5YcPnz4+gdXno3SZ+rUqbz99tsAtGvXjrvvvhuAS5cu6Vne5vp1l8vFlStX9HJYcya42UKX4kynTp0iMlIkbb311lvl98EUhfLaa68BsHDhQn01denSJe2HGoIRJJfIE3lTjGizDcP0FyJVUI/TOAggE19S9VCKxMvLi/HjZ3D06AG3ChRzuWthDdXMx6I8thbFxLB37+0p8X07sHDhQt5++21mttjF9K/7Y6zvpUdDikmYFTBEb5E2yDaOp3EiAhsf+PszNi3Nzbht0KABII7Dz6Ojmdy6NdtIwAh/NEXq0ZwgCXPjNBG+SQUcJNJMe06KG4H5aiWOWOHdS0iYoV8UzQa2h4cHwaTjR7re/8mGUW4OkE4SsBt2/0l87JQzCI/GPhppDTv/9cMPbN+
+Xf8Ob1UmTZp0lcqvpCBDIygoqHgaG1C55coVisqBbPouwx7mdLgg7ea+ijROh0FABn5aOXaadmoEePvjj8nKyiKY0unF07jx98TEtCl6Q0WlRi5QqlWby+TJ6aZXpKFrRxgZIm25i3Z8hWHUh8iOSSnAYj8/Rpw7B7i3oc/Ly6N69eosjI+nadM/Y1SRSMM6G2Gy+CFylWzayKsQv43XMWTNpaCXUx/lChAfLwxjq9Xq5rmTBoefXzDNtb0EmfYmy3MBVvElqVghIxsyghB+j8NACh/tFGJ9e/bsUQu7G6WyN2K7WZEH5syZM7nzzjsB4a6WyUr5hb4Kqjoxb+9yufTeDgcOHBAyz4qbDrO42fz583E4pMqAXCmCUbBvUodzk+YGcXKVazErTRGqiOb1ocViYd748VeFQc1icOaqFDP5j7PGjY+RkNCFffv28eqrr5b4cysUCsU1UZ6NsmX69OnMnj0bwC23wtzfxOVyXVUiK7eRWCwWNm7cqI+puPl5/fXXAeGatFj6YUh2ifhzc1KxkUo2MbrmgFxt2YFt+CLczTZd29UJJGoiX3l5eaQSzJYti8nJyXGrMjFXmrRoMZDo6M91sSZzrwsAT09PIiIuc+jQIV544YWy+joUNxmTJk2iSpV3AZg48TBSxcqXRP2aEIIh/W1DBF2OII5D6a/LBrdqJnles1gsupcjKmodYWFS1yfNNKLWValWbc255w2nPSCvBlxpjyFLbxjmcm7roqLw9PTk3nuHkpz8qVsYRc6nneaVEQ0WjIBNIEZjgRBgGitIx1t7ZAccbN36GQcPHtS/K8UNonI2Kp5WrVYRFVVMFTZFpWTOnFY0adIEgB493qQ5qTyCOLXJk6cNwxyJB+JJ57QWP99PJDZkCp9Zbr44PS2ttG49mtjYJTf2IRS3HLJDc05ODo0bN2bKs8+6HZc2jGMyCZE2eYLm2jPJBGut3zMzxUXdx8fHrQ2D9Mz6+PggMlJBXPobIi75pkRUJ8bFSM9Xlg3W7Npjbz0Y+XRYGKkEACE0bDiUlJRP9c+1XeutE67t1Y6h1WUDvEPBW8spHf612Ob/sw8Z6lm79nmio6NV2/jSxANVjVLWTJ06FYA5c+bQrl07QJRnyQS9o0cH6UJeYMQbPTw89MqAyMhIfRxF5WPy5Mm6h2vJkhHUqlUL51/+okvig5GeJ++L054NgEQCiCKVM0BEhBinf/9oIJSaNWu6ecrMCXJihSci7S1adCY2dtdVIbv9+/czaJCS/lbAnLVr+U0zOPwQx6EU2U9CZh15I6Xo7JzADxh6r7hyJwN7U1IoiKgoUc0RFjYDYSR7antJgwyrkEu3egs9LxcYeUsyxwMgiEztV5OpN4i0kZy8WN+PPSCA3tr9fbj7RTSdUKMBOGBJhtAo8CWOFVve1SvLFKWM8myUH2YredGiRdStWxcQq4rffvuN9PR0PD09dWnhs2fPMmrUKAAeffTR8p+wolQxG4szZ86EGTP4pUoV7njrLd0tLfOnkpHGhh2piZgEpBKs5YGAOGNa8fYWJorZyJACXxaLhaNHV9Os2bMAHDt2DIfDgcvl4rvvhJyRrKRR3L5MmTKFGTNmANB09Wqa9uuHpSFgBdspsY1UgjlBgnbPj3SsJBCnmwKJGJ4Nc9+e6tWr62GOQ4dmcubMGcLDRSmr4bmwgTMIyMTQ+miDoaIr+6JInY0jSI9HlSpV9P3eJ98KOB1GXZdd+wzxQGACbi4bL2Dtrl2kpqYSHy/8h/L7UJQSKmdDoahYXO+/z8px4wjCPWdDGBtJiLWlg1R8Wb/+fX7//XdtKy+MdWdRODl48D8kJiaW1rQVtxjy4jpx4kTyVq2i/z8HgBUs2oW7/THZDi2db0hCtlJLx1uvkYqN/c4tJ8iciFytWjVAhFf8/f05cMCfBx88hNEZKBBD2rQKYIOG/kYTaqe2WbLU1LAiDI8Mt325nKC1Q9FrauwIM+UwwqjnFIRoRlQS4LNqFb/99hvR0dHMmzfver9CxbVQ1SgVg/RYmNmwYQMAffr0Ke/pKMoZmeT79ttvl0j46+LFi5zTygzRNDfMMXLzX4mMm589e5a+ffuyRGtxrDwaimsRMXgV/bcYrRACHdA9WVy8D5KqqW1YMZqsQIsWD3P2rPA8dKlXjx1aGPiuu/6GoaVh5cCBBwCIjn6I1q334960xQfd525D6HXJTNRkIFkWv9oxUj8NEjDCkVK3NxChzJ6ByM9Ixj3zqUtJvxxFyVGeDYWi4njrrbdYtmwZwz7+mLmvvKI3kcoATebZgTjLilz6zMxMJk+urm3l4KefuupjSSMjNzdXN0CqVKmiua/tyquhKBbmlf3yrCyGrB4qHni511MZ9+QxCj5k0r5uXe4FugMrAwM5AwQDiTyjvecRHnzwZ4xcjN5Qx9+Ilhy1wmUgzyJiHnYMcdGUo8Dn2oa7kb+LevUm8Ouv7wBC91OqxdgQ/hcbwsC4gtEYbojW0TQlJYX+/fsD6H8VZYDK2VAobg7sGILRp/FFyBDJX6esDQDjVC9Nk6KpT3rRGykU+RgyZAgfavo+ISEhVOvale1AKq0QPgOpEyOOyUwCyMRBLTKxI5wSDYHxv/5K77vuAiCSbIQpUk9sEVZbWAS1EJbAL9rOL2YDh4UnI1kGQdbQihhAeCvigURaAUHcddd4IJl2pvmHmm5S8eaOAwew2+0kJAgPjNKVKSdUbxSFomKRYm1+wAldjlzKmMs8egdffz2OXr3+g+GLzND1NDw8PHBqgWqzXHnVqlV5IySEtT/+SJs2Sh1UoVBUEMqzoVBULLLUrj3g1DwQibquQAKNSGXs0qVarkYKhkcjGYtlgj5OQU3XrFYrDiA+Pl4ZG4rrQq7833vvPVrv3Mlo4JsuqxDGsB3h1ZAGcCBgJYYEvEnVVTRcLhdS0vAKO4nRe7CEgLW2kfYh850vgyhc3Y5I4YynFXF0x4i2ZCOcIYkkI7yAQmc3Ej/82AQI30lLNE0NK5AMu/74g6NHj6qcpfKmlDwbBbeTLGM2b95Mhw4dsNls1K1blyFDhujiMgAvvvginp6e1KxZU7+Zy7EUty9ffvkl7du3x8vLi86dO1f0dADouno1vYHeQDviCGAvjUjlQ61EdfjwiUA8x48v5fjxpfz4Y0Sxxu396adlNWXFbcRrr73Gpk2b2LRpEytWPIK43KchjA1v7SY7xtYjCZEnkQEcb9CAPidP0ufkSR4BfNmEMCYSRAVrFOJvvDbs5UvAt8B3+PAlzxDHGGAAQvnzcaDrL7/QFBkitCLMkFAgjIMIATKpxEE9WPnmCl4fPp5NmzYpQ6MikJ6Nwm7FpEI8GxkZGbz55pt06tSJy5cvM3DgQCZMmMBHH32kbzNx4kRmzZpVEdNT3MT4+voyduxYjh8/zo4dOyp0LlIK+f3336d1hDAgutSqpb9+5swZLl68CIAPiXoiqNVq1cv9cnNz3bQ1pGfjwoULSrBLUSb85z9PAk/yxBMvYCxZz2h/03AiEjFrIeyHlPvuA4y2azs5AoSBww+2+4HVX7w1B4TWhp
DjkhLjTRHmhCzFPdmgAZ4I/8hpPY9JSK5LZOrqrjf+A+fPl96HV5Sc8qpGeffddzlw4ABff/21/tzo0aOxWCx88MEH17XTgQMH6ve9vLwYOnSo6htyG/Dzzz/Tpk0btm/fTuvWrUlJSSE0NJSvvvqq2F6Kxx8XnRGWL19ehjMtGePGjWPu3LmA0CSQIkh//PGHZmA8xJ7D89xaw8uyVqfT6aYg2qTJnwD45JPphIWFlfMnUdyqzJ8/X7//5ptvArBhwyeEh78IgC+JuixGNiIA4kSYAFJDRvpBhGGiiV2QAc5auFxDET9hP15+uQfQgzcGDdKl7eyAQ7MrTmkjiIfZCD9GEhCvl7U22rCBBZGR8P33zJo1y+2aoShnqlA+OhvPPfccM2bMwG63Y7PZyMnJISIigi1btjBixAhWrlxZ4PsaNGhAbGxssSbx/fff06xZM7fnlixZwpIlS7j77ruZOnUqvXr1KtZYipuXe+65h7lz5/Lcc88RFRXFSy+9xKBBg+jcuXOpHUs3J08Ve8uTJ4W3Zvfu3WU1GcVtjtljPH260A1q27Yt9evXZ0ZoKAmIy38CouRUZhpZtce+nCadg0ASLtfqq8aXTQGrV69O48aN2RYaqgt0gYi4JJseS3GwzZtf58cfOwHQrVs3unXrViqfV3FjVAFqeBS5WZEUaWzceeeddOrUia+++oqhQ4eydetW6tSpwwMPPMADDzygCw1dL9999x2fffYZkZGR+nOjR49mwYIF1KpVi23bttGvXz/q1q3Lww8/fEP7KitatmxZ0VOoNAwdOpSNGzfSrl07LBaLLogmjcvKSkHdJWfNmuVWdWJGhk5yc3P1DpsWi4Xk5GRAlC7mRx1nirJmxpEjWCwWvULK09NTP3ZdLheTmjUjFBi2ehh9+xavCeWfjxyhatWq1Nc8fk1yc+l25QrgHlI8ffp0KX8aRWlgAaoXuVUxxnGZdWkLISIigqVLl7J792769+9PaGhosbvq7dmzhyeffBKAhg0b6v1DAA4cOEC3bt2IiIjgscceK3SMYcOG4e3tzYIFC4q1T8XNzcaNGwkPD2fZsmUMHTr0usZYvnw5X3zxBbt27SrdyZUyc+fOZfLkRhw9ep8eRsnNzdUrUPLy8nSDBKB58+YFjqNQlDWyesXf3x9/f5GHYbPZqFmzJgBZWVkkJSUBokFhfmQo1PybfOcdIdgVFBTkNo7dbgfg3LlzurLuhx9+WKqfR1E6VAsLo35UVKGv1w4LI+oar0uKlSDao0cPhg8fTlxcHJs2bdKV6oYNG8YXX3xR4HukYdGxY0eysrKuej0mJobw8HD+9a9/XdPQANw6XSoqN1lZWYwdO5bBgwczY8YMevXqha+vb7GOpcrKO++cAO6r6GkoFNekLC72BRklispFFUrHs1Gs0tfq1avTu3dvBg4cSNu2bWnQoAEAH330EVlZWQXernVxiIuLo2vXrixatKjAuNyaNWvIysoiLy+Pbdu28cUXXxAeHl7ASIrKxpgxYwgLC2P58uU8/fTTDBs2DCj+sZSbm8ulS5fIyckhLy+PS5cu6S7fmxFPT0+3MInFYrlKV0NuExMTU1HTVCgUigLxQFQOFXYrLsXW2Rg0aBA//fQTzz//fAmGL5gFCxaQlpbG4MGDdR0Nc4LoBx98QGBgIDabjQkTJvDJJ5/cNJoKiutn/fr1bN26laVLlwJCbCg6OpoVK1YUe4x///vf1KhRg+HDh7Nnzx5q1Khx3aGY8mDcuHG89lpNPUdD4nK5dG9damoqqamppfLbUigUitJEejYKuxWXYuVsAPzyyy80btyYs2fPcscdd5R0vgrFbU1sbKxbbob82VksFi5cuABAx44dK2RuCkVpUFDOhqLyUzssjEevkZORXJo5G3l5ebz33nv0799fGRoKxXXQokULJk6cqD+Wxsa7775bUVNSKBSKIimtnI0ijY3s7GwCAgJo2LAhW7duLYVdKhS3J+YW4AqFQlEZ8ECoyd4oRRob3t7eBVaTKBQKhUKhuLUpLZ0N1fVVoVAoFApFgVSlZFUn1xpHoVAoFAqF4iqUZ0OhUCgUCkWZInU2bpRi62woFAqF4vq4fPkyw4YNIyAgAF9fX7p168aZM2eKfqNCUcFUAapd41aScRQKhUJRhnzwwQfs37+f2NhYUlJSqF27NqNGjaroaSkURVLuCqIKhUJxu7J69Wpd7bhmzZpUq1atRKrG//vf/3jiiScICAigevXq9OvXr1L3+1HcPihjQ6FQKMqJfv366b16UlJSCA4OZsCAAbzzzjvYbLZCb5LBgwfzww8/kJKSgsPhYMWKFXo37FuFli1b0rJly4qehqKUKS25cpUgqlAoFMUkLy+PgQMH0rlzZ1555RWgeJ1NQ0JCuOuuuwgMDMTDw4P777//lmupvnDhwoqegqIMUAmiCoVCUc688cYbZGZm8o9//KNE7xs5ciSXL1/m999/Jzs7m2efffaW82woKo5du3bRsmVLmjVrxiOPPFLgNh07dtS9T/Xq1aNHjx7FGru0EkSVZ0OhUCiKQUREBKtWreLgwYNYrVYAZs+ezezZswt9j1RfPnz4MH//+9/x9fUFYNSoUbz11lucP3+eOnXqlP3kFbcsdrudESNGsHXrVho0aMC5c+cK3G7Pnj36/V69etG9e/dija88GwqFQlFOxMTEMGrUKNatW4efn5/+/NSpU/VcjoJukjZt2vD555+TkZGB0+lkyZIl1KtXTxkaihtm5cqVPPvsszRo0AAAf3//a25/8eJFduzYUSLPhsrZUCgUinJg/fr1XLhwgQ4dOujPdezYkS1bthTr/fPnz2f06NGEhIRw5coVmjdvzjfffFNW01XcRpw8eRKn00nnzp3JzMxkzJgxvPDCC4Vuv27dOh577LFid3D3q1OHDmFhhb5eXIPZ4pK9rhUKhUKhUFQqXn31VaKiovjvf//LH3/8wUMPPcTmzZu57777Ctz+ySefZMiQIfTq1atc56nCKAqFQqFQVCIWL17sluz5xBNP4O3tTZ06dejUqRNHjhwp8H3nz5/nxx9/5Omnny7nGStjQ6FQKBSKSsXIkSM5fPgwhw8fpmfPnuzdu5ecnBwcDgeRkZE0adKkwPetWbOGZ555hurVS6O1WslQxoZCoVAoFJWUJk2a0LVrV1q0aEHbtm0ZMmQIzZs3B+Cpp54iJSVF3zYiIoIBAwZUyDxVzoZCoVAoFIoyRXk2FAqFQqFQlCnK2FAoFAqFQlGmKGNDoVAoFApFmaKMDYVCoVAoFGWKMjYUCoVCoVCUKcrYUCgUCoVCUaYoY0OhUCgUCkWZoowNhUKhUCgUZYoyNhQKhUKhUJQpythQKBQKhUJRpihjQ6FQKBQKRZnyf7mZl3dHBHAYAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from nilearn.plotting import plot_stat_map\n", + "\n", + "map_path = downloaded_db[\"local_path\"][1]\n", + "plot_stat_map(map_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/_sources/ibc_api.rst.txt b/_sources/ibc_api.rst.txt new file mode 100644 index 0000000..7c24eb6 --- /dev/null +++ b/_sources/ibc_api.rst.txt @@ -0,0 +1,25 @@ +ibc\_api package +================ + +Submodules +---------- + + +.. automodule:: ibc_api.metadata + :members: + :undoc-members: + :show-inheritance: + + +.. automodule:: ibc_api.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: ibc_api + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/index.md.txt b/_sources/index.md.txt new file mode 100644 index 0000000..b9c4cb6 --- /dev/null +++ b/_sources/index.md.txt @@ -0,0 +1,61 @@ +--- +hide-toc: true +--- + +# Individual Brain Charting + +The Individual Brain Charting (IBC) project has collected a high-resolution multi-task-fMRI dataset to provide an objective basis for a comprehensive atlas of brain responses. The data refer to a cohort of participants performing many different tasks. Acquiring a large amount of tasks on the same subjects yields a precise mapping of the underlying functions, free from both inter-subject and inter-site variability. Additionally, the dataset comes with high-resolution anatomical and diffusion images, to achieve a fine anatomical characterization of these brains. + +# Cite + +- Pinho, A.L. *et al.* (2024) Individual Brain Charting dataset extension, third release for movie watching and retinotopy data. *Sci Data* **11**(1), 590. DOI: [10.1038/s41597-024-03390-1](https://doi.org/10.1038/s41597-024-03390-1). + +- Pinho, A.L. *et al.* (2020) Individual Brain Charting dataset extension, second release of high-resolution fMRI data for cognitive mapping. *Sci Data* **7**, 353. DOI: [10.1038/s41597-020-00670-4](https://doi.org/10.1038/s41597-020-00670-4). + +- Pinho, A. L. *et al.* (2018) Individual Brain Charting, a high-resolution fMRI dataset for cognitive mapping. *Sci Data* **5**, 180105. DOI: [10.1038/sdata.2018.105](https://doi.org/10.1038/sdata.2018.105). 
+ +```{toctree} +:caption: Quickstart +:hidden: + +data_hosting +api_install +get_data +ibc_api +``` + +```{toctree} +:caption: fMRI Data +:hidden: + +tasks +processing_pipelines +``` + +```{toctree} +:caption: DWI Data +:hidden: + +dwi_acquisitions +dwi_processing +``` + +```{toctree} +:caption: Details +:hidden: + +participants +mri_acquisitions +mridata_organization +experimentaldesign_diagrams +behavioral_data +movie_protocols_data +references +``` + +```{toctree} +:caption: Miscellaneous +:hidden: + +contact +``` diff --git a/_sources/movie_protocols_data.rst.txt b/_sources/movie_protocols_data.rst.txt new file mode 100644 index 0000000..d4c9486 --- /dev/null +++ b/_sources/movie_protocols_data.rst.txt @@ -0,0 +1,65 @@ +Movie protocols implementation +============================== + +In this section, we provide details of the movie protocols used in the IBC project as some might be relevant +for the analysis of the data and reproduction of the protocols. + +Lags in Raiders movie +--------------------- + +It has been noted that there was a lag between the acquisition onset and the stimuli onset. In other words, +the presentation of the movie didn't start immediately when the acquisition started, but with a delay that +varied between runs and subjects. + +When synchrony between the movie and acquisition times is required, this lag must be considered. +We provide here the exact lag for each run and each subject. Note that this table can also be +download as a *csv* file `here `__. + +The *lag* represents the time between the acquisition onset and the stimuli onset, in milliseconds. +The *run* column includes the video file name corresponding to the movie section shown during each run, along with the respective run number. +Please note that the first three sections of the movie (movie clips displayed during the first three runs) were presented to the participants +again at the end of the last session. As a result, the *run* column includes the video file name and the run number for the first three, +along with the *test* legend. + +.. dropdown:: Lags in Raiders movie + + .. csv-table:: + :file: ../../movie_protocols_data/lags_raiders.csv + :header-rows: 1 + +Lags in GoodBadUgly movie +------------------------- + +Similar to the Raiders movie, there was a lag between the acquisition onset and the stimuli onset along the runs of the GoodBadUgly movie. +This lag could vary across runs and subjects, and should be considered when synchrony between the movie and acquisition times is required. + +We provide here the exact lag for each run and each subject. Note that this table can also be +download as a *csv* file `here `__. + +The *lag* represents the time between the acquisition onset and the stimuli onset, in milliseconds. +The *run* column includes the video file name corresponding to the movie section shown during each run, along with the respective run number. +Please note that the first three sections of the movie (movie clips displayed during the first three runs) were presented to the participants +again at the end of the last session. As a result, the *run* column includes the video file name and the run number for the first three, +along with the *test* legend. + +.. dropdown:: Lags in GoodBadUgly movie + + .. csv-table:: + :file: ../../movie_protocols_data/lags_goodbadugly.csv + :header-rows: 1 + +Lags in MonkeyKingdom movie +---------------------------- + +We report here the exact lags for each run across subjects for the Monkey Kingdom movie. 
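+
+Correcting for this lag, for any of the three movies, amounts to shifting the
+stimulus or annotation onsets into scanner time before further analysis. The
+sketch below illustrates that step under stated assumptions: the column names
+(``subject``, ``run``, ``lag``) should be checked against the header row of the
+*csv* files linked above, and the subject and run labels in the example are
+hypothetical. The Monkey Kingdom lag table itself follows right after.
+
+.. code-block:: python
+
+    import pandas as pd
+
+    # Lag table for one of the movies, downloaded from the links above.
+    lags = pd.read_csv("lags_raiders.csv")
+
+    def to_scanner_time(onsets_sec, subject, run):
+        """Shift stimulus onsets (movie clock, in seconds) into scanner time."""
+        row = lags[(lags["subject"] == subject) & (lags["run"] == run)].iloc[0]
+        lag_sec = row["lag"] / 1000.0  # lags are reported in milliseconds
+        return [onset + lag_sec for onset in onsets_sec]
+
+    # Hypothetical example: an annotation 12.5 s into the clip shown in run 1
+    print(to_scanner_time([12.5], subject="sub-01", run="run-01"))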
+The *lag* represents the time between the acquisition onset and the stimuli onset, and could vary across runs and subjects. + +This table can be downloaded `here `__. + +The *lags* are reported in milliseconds. + +.. dropdown:: Lags in MonkeyKingdom movie + + .. csv-table:: + :file: ../../movie_protocols_data/lags_monkeykingdom.csv + :header-rows: 1 \ No newline at end of file diff --git a/_sources/mri_acquisitions.rst.txt b/_sources/mri_acquisitions.rst.txt new file mode 100644 index 0000000..60bdc55 --- /dev/null +++ b/_sources/mri_acquisitions.rst.txt @@ -0,0 +1,709 @@ +MRI acquisitions +================ + +This section contains details about the overall organization of the MRI +sessions across participants. It provides details about session IDs for +every participant, the MRI sequences employed in every session and their +imaging parameters. A description about data anomalies is also provided +per participant for every session. + +For more information about the technical specifications of the MRI +equipment used, please consult Section "MRI Equipment" of `Pinho et al. +2018 `__ or `Pinho et al. 2020 `__. + +Organization of the MRI sessions +-------------------------------- + +The figure below depicts the temporal organization of +runs in terms of MRI sequences within sessions: + +.. _acqdiagram: + +.. figure:: acquisitions_diagram/final_diagrams/acquisitions_diagram_release5.png + :alt: Structure of the IBC-MRI sessions in terms of number, type and duration of the runs performed. + :scale: 20 % + + **Structure of the IBC-MRI sessions in terms of number, type and duration of the runs performed.** Each + rectangle represents one run; its width visually quantifies the duration of that run + and the color indicates the type of sequence employed. Rows of rectangles + depict the chronological organization of every session. Labels on + the left side identify the session represented by each row. For every + session, the tasks employed during the EPI sequences are specified on + the right side of the corresponding row. + +Besides, a plan of the MRI sessions undertaken per participant can be +found in Table `[table:acqplan_appendix] <#table:acqplan_appendix>`__. +and a summary of the fMRI-data anomalies over sessions and participants +can be found in Table `[table:dataanomalies_appendix] <#table:dataanomalies_appendix>`__. + +Parameters of the MRI sequences +------------------------------- + +Details of the parameters used for all the MRI sequences employed are +provided over the following subsections. The bulk of the data is +collected using a SIEMENS MAGNETOM Prisma-fit 3T scanner. + +2D Spin-Echo +~~~~~~~~~~~~ + +The 2D Spin-Echo maps are used to obtain a model of distortions for EPI +images: 2 pairs of AP/PA images are acquired along with each EPI (BOLD +or diffusion-weighted) acquisition one at the start and one at the end +of each scanner session. + +.. _spinecho: + +.. table:: Acquisition parameters for Spin-Echo + + ======================= ============= + Parameter Value + ======================= ============= + *Sequence* Spin Echo EPI + *TR* 7680 ms + *TE* 46 ms + *FOV* 192 mm + *Matrix* 128 x 128 + *Slice thickness* 1.50 mm + *Number of slices* 93 + *GRAPPA iPAT* 2 + *Ref. lines PE* 62 + *Echo spacing* 0.65 ms + *BW* 1776 Hz/Px + *Fat suppr.* Fat sat. 
+ *Phase partial Fourier* None + *Multi-slice mode* Interleaved + *Series* Interleaved + ======================= ============= + +EPI T2\* with BOLD contrast for task scans +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The same acquisition parameters were used in all task-fMRI runs, only +the number of repetitions (TRs) changed as each run had a different +duration, `This table `__ contains the number of TRs for +every task. + +.. _bold: + +.. table:: Acquisition parameters for task-based BOLD-contrast images + + ========================= ================= + Parameter Value + ========================= ================= + *Sequence* Gradient Echo EPI + *TR* 2000 ms + *TE* 26.8 ms + *Flip angle* 74 deg + *Fat suppr.* Fat sat. + *FOV* 192 mm + *Matrix* 128 x 128 + *Slice thickness* 1.50 mm + *Number of slices* 93 slices + *GRAPPA iPAT* 2 + *Multiband accel. factor* 3 + *Echo spacing* 0.65 ms + *BW* 1776 Hz/Px + *Phase partial Fourier* None + *Multi-slice mode* Interleaved + *Series* Interleaved + ========================= ================= + +The only exception to these parameters specifications was the :ref:`MultiModal` task, `this table `__ contains the details of the parameters that were changed. + +.. _multimodalparam: + +.. table:: Acquisition parameters for :ref:`MultiModal` tasks' BOLD-contrast images + + ========================= ================= + Parameter Value + ========================= ================= + *Sequence* Gradient Echo EPI + *TR* 2600 ms + *TE* 26.8 ms + *Flip angle* 78 deg + *Fat suppr.* Fat sat. + *FOV* 192 mm + *Matrix* 128 x 128 + *Slice thickness* 2 mm + *Number of slices* 75 slices + *GRAPPA iPAT* 2 + *Multiband accel. factor* 3 + *Echo spacing* 0.65 ms + *BW* 1776 Hz/Px + *Phase partial Fourier* None + *Multi-slice mode* Interleaved + *Series* Interleaved + ========================= ================= + + +EPI T2\* with BOLD contrast for resting state scans +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. _resting: + +.. table:: Acquisition parameters for resting-state BOLD-contrast images + + ========================= ================= + Parameter Value + ========================= ================= + *Sequence* Gradient Echo EPI + *TR* 760 ms + *TE* 29 ms + *Number of TRs* 1120 + *Flip angle* 53 deg + *Fat suppr.* Fat sat. + *FOV* 194 mm + *Matrix* 88 x 88 + *Slice thickness* 2.20 mm + *Number of slices* 66 slices + *Multiband accel. factor* 6 + *Echo spacing* 0.55 ms + *BW* 2470 Hz/Px + *Phase partial Fourier* None + *Multi-slice mode* Interleaved + *Series* Interleaved + ========================= ================= + +T1 +~~ + +A few types of T1 images were acquired: + +- High-resolution T1 MPRAGE anatomical scan acquired during screening + +.. _mpragesagT1: + +.. table:: Acquisition parameters for high-resolution T1 MPRAGE scan. + + ========================= =========== + Parameter Value + ========================= =========== + *Sequence* T1 MPRAGE + *Orientation* Sagittal + *TA* 7:46 + *TR* 2300 ms + *TE* 2.98 ms + *TI* 900 ms + *Flip angle* 9 deg + *FOV* 256 mm + *Matrix* 256 x 256 + *Slice thickness* 1 mm + *Number of slices* 160 + *Multiband accel. factor* 1 + *Echo spacing* 7.1 ms + *BW* 240 Hz/Px + *Fat suppr.* None + *Phase partial Fourier* 7/8 + *Turbo factor* 176 + *Series* Interleaved + ========================= =========== + +- Yearly maintenance T1 MPRAGE anatomical scan + +.. _highresT1: + +.. table:: Acquisition parameters for yearly maintenance T1 MPRAGE scan. 
+ + ========================= =========== + Parameter Value + ========================= =========== + *Sequence* T1 MPRAGE + *Orientation* Sagittal + *TA* 4:44 + *TR* 2300 ms + *TE* 3.05 ms + *TI* 900 ms + *Flip angle* 9 deg + *FOV* 230 mm + *Matrix* 256 x 256 + *Slice thickness* 0.9 mm + *Number of slices* 176 + *Multiband accel. factor* 2 + *Echo spacing* 7.4 ms + *BW* 240 Hz/Px + *Fat suppr.* None + *Phase partial Fourier* 7/8 + *Turbo factor* 176 + *Series* Interleaved + ========================= =========== + +- High-resolution T1 MPRAGE anatomical scan acquired with diffusion tractography + +.. _mpragesagT1diff: + +.. table:: Acquisition parameters for high-resolution T1 MPRAGE scan. + + ======================= =========== + Parameter Value + ======================= =========== + *Sequence* T1 MPRAGE + *Orientation* Sagittal + *TA* 18:26 + *TR* 2300 ms + *TE* 4.93 ms + *TI* 900 ms + *Flip angle* 9 deg + *FOV* 248 mm + *Matrix* 352 x 352 + *Slice thickness* 0.7 mm + *Number of slices* 160 + *GRAPPA accel. factor* 3 + *Ref. lines PE* 61 + *Echo spacing* 11.5 ms + *BW* 130 Hz/Px + *Fat suppr.* None + *Phase partial Fourier* Deactivated + *Turbo factor* 339 + *Series* Interleaved + ======================= =========== + +T2 +~~ + +Several types of images were acquired under this category: + +- High-resolution T2 turbo SE sequence (Siemens SPACE) + +.. _spcsagT2: + +.. table:: Acquisition parameters for high-resolution T2 sagittal images. + + ========================= =========== + Parameter Value + ========================= =========== + *Sequence* T2 turbo SE + *Orientation* Sagittal + *TA* 15:30 + *TR* 3200 ms + *TE* 420 ms + *Flip angle mode* T2 var + *Turbo factor* 284 + *FOV* 270 mm + *Matrix* 384 x 384 + *Slice thickness* 0.70 mm + *Number of slices* 240 slices + *Multiband accel. factor* 1 + *Echo spacing* 3.68 ms + *BW* 723 Hz/Px + *Fat suppr.* None + *Phase partial Fourier* None + *Series* Interleaved + ========================= =========== + +- T2 FLAIR sagittal. + +.. _flairsagT2: + +.. table:: Acquisition parameters for T2 FLAIR sagittal images. + + ========================= ====================================== + Parameter Value + ========================= ====================================== + *Sequence* T2_FLAIR_SAG_FOV230 + *TR* 5000 ms + *TE* 396 ms + *Flip angle mode* T2 var + *FOV* 230 x 230 mm + *Matrix* 256 x 256 + *Slice thickness* 0.81 mm, 192 slices, 0.81 mm isotropic + *Multiband accel. factor* 1 + *Echo spacing* 3,36 ms + *BW* 781 Hz/Px + *Phase partial Fourier* 0 + *b-values* 0 s/mm\ :sup:`2` + ========================= ====================================== + +- T2 sagittal with fat saturation. + +.. _sagfatsatT2: + +.. table:: Acquisition parameters for T2 images with Fat-Sat. + + ======================= ====================================== + Parameter Value + ======================= ====================================== + *Sequence* T2_SPC_SAG_fatsat + *TR* 3200 ms + *TE* 420 ms + *Flip angle mode* T2 var + *FOV* 270 x 270 mm + *Matrix* 384 x 384 + *Slice thickness* 0.70 mm, 240 slices, 0.70 mm isotropic + *Echo spacing* 3.68 ms + *BW* 723 Hz/Px + *Phase partial Fourier* None + *b-values* 0 s/mm\ :sup:`2` + ======================= ====================================== + +- T2 sagittal (0.7mm). + +.. _highres-sag_T2: + +.. table:: Acquisition parameters for high-resolution sagittal T2 images. 
+ + ========================= ====================================== + Parameter Value + ========================= ====================================== + *Sequence* T2_SPC_SAG_0.7mm + *TR* 3200 ms + *TE* 420 ms + *Flip angle mode* T2 var + *FOV* 270 x 270 mm + *Matrix* 384 x 384 + *Slice thickness* 0.70 mm, 240 slices, 0.70 mm isotropic + *Multiband accel. factor* 1 + *Echo spacing* 3.68 ms + *BW* 723 Hz/Px + *Phase partial Fourier* None + *b-values* 0 s/mm\ :sup:`2` + ========================= ====================================== + +T1 relaxometry +~~~~~~~~~~~~~~ + +Three different runs were performed: + +- A B1 map for T1 mapping. + +.. _b1T1: + +.. table:: Acquisition parameters for B1 maps. + + ========================= =============================== + Parameter Value + ========================= =============================== + *Sequence* B1Map_for_T1_map + *TR* 20000 ms + *TE* 2.59 ms + *Flip angle* 8 deg + *FOV* 256 x 256 mm + *Matrix* 128 x 128 + *Slice thickness* 2 mm, 44 slices, 2 mm isotropic + *Multiband accel. factor* 1 + *Echo spacing* 4.5 ms + *BW* 800 Hz/Px + *Phase partial Fourier* None + *b-values* 0 s/mm\ :sup:`2` + ========================= =============================== + +- T1 maps with FA from 3 to 19 in steps of two. + +.. _faT1: + +.. table:: Acquisition parameters for T1 maps. + + ========================= ================================ + Parameter Value + ========================= ================================ + *Sequence* T1Map_1mm + *TR* 10 ms + *TE* 3 ms + *Flip angle* 3 deg + *FOV* 256 x 256 mm + *Matrix* 128 x 128 + *Slice thickness* 1 mm, 176 slices, 1 mm isotropic + *Multiband accel. factor* 1 + *BW* 240 Hz/Px + *Phase partial Fourier* 7/8 + *b-values* 0 s/mm\ :sup:`2` + ========================= ================================ + +T2 relaxometry +~~~~~~~~~~~~~~ + +Two types of relaxometry images were acquired: + +- T2\* sagittal (relaxometry). + +.. _sagT2relaxo: + +.. table:: Acquisition parameters for T2 relaxometry images. + + ========================= ====================================== + Parameter Value + ========================= ====================================== + *Sequence* relaxometry_T2star_sag + *TR* 50 ms + *TE1* 1.77 ms + *TE2* 5.06 ms + *TE3* 8.35 ms + *TE4* 11.64 ms + *TE5* 14.93 ms + *TE6* 18.22 ms + *TE7* 21.51 ms + *TE8* 24.80 ms + *TE9* 28.09 ms + *TE10* 32.50 ms + *TE11* 38.90 ms + *TE12* 47.00 ms + *Flip angle* 20 deg + *FOV* 288 x 288 mm + *Matrix* 196 x 196 + *Slice thickness* 1.50 mm, 120 slices, 1.50 mm isotropic + *Multiband accel. factor* 1 + *BW* 420 Hz/Px + *Phase partial Fourier* 7/8 + *b-values* 0 s/mm\ :sup:`2` + ========================= ====================================== + +- T2 relaxometry with 12 contrasts. + +.. _12conT2relaxo: + +.. table:: Acquisition parameters for 12-contrast T2 images. + + ======================= ==================================== + Parameter Value + ======================= ==================================== + *Sequence* relaxometry_T2_tra_12contrastes + *TR* 7600 ms + *TE1* 14 ms + *Flip angle* 180 deg + *FOV* 256 x 256 mm + *Matrix* 256 x 256 + *Slice thickness* 1,1 mm, 128 slices, 1,1 mm isotropic + *GRAPPA accel. factor* 3 + *Echo spacing* 14 ms + *BW* 215 Hz/Px + *Phase partial Fourier* None + *b-values* 0 s/mm\ :sup:`2` + ======================= ==================================== + +Number of TRs for each task +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. _TRnum: + +.. table:: Number of repetitions for each task; TR = 2s. 
+ + +-------------------------+-------------------------+---------------+ + | Task | Runs | Number of TRs | + +=========================+=========================+===============+ + | *ARCHI Standard* | all runs | 156 | + +-------------------------+-------------------------+---------------+ + | *ARCHI Spatial* | all runs | 252 | + +-------------------------+-------------------------+---------------+ + | *ARCHI Social* | all runs | 262 | + +-------------------------+-------------------------+---------------+ + | *ARCHI Emotional* | all runs | 220 | + +-------------------------+-------------------------+---------------+ + | *HCP Language* | all runs | 229 | + +-------------------------+-------------------------+---------------+ + | *HCP Emotion* | all runs | 139 | + +-------------------------+-------------------------+---------------+ + | *HCP Gambling* | all runs | 188 | + +-------------------------+-------------------------+---------------+ + | *HCP Motor* | all runs | 185 | + +-------------------------+-------------------------+---------------+ + | *HCP Social* | all runs | 196 | + +-------------------------+-------------------------+---------------+ + | *HCP Relational* | all runs | 311 | + +-------------------------+-------------------------+---------------+ + | *HCP WM* | all runs | 303 | + +-------------------------+-------------------------+---------------+ + | *RSVP Language* | all runs | 310 | + +-------------------------+-------------------------+---------------+ + | *Mental Time Travel* | all runs | 394 | + +-------------------------+-------------------------+---------------+ + | *Preference* | all runs | 248 | + +-------------------------+-------------------------+---------------+ + | *Theory-of-Mind | all runs | 186 | + | localizer* | | | + +-------------------------+-------------------------+---------------+ + | *Theory-of-Mind and* | | | + +-------------------------+-------------------------+---------------+ + | *Pain-Matrix Narrative | | | + | localizer* | | | + +-------------------------+-------------------------+---------------+ + | *Theory-of-Mind and* | | | + +-------------------------+-------------------------+---------------+ + | *Pain-Matrix Movie | | | + | localizer* | | | + +-------------------------+-------------------------+---------------+ + | *Visual Short-Term | all runs | 260 | + | Memory* | | | + +-------------------------+-------------------------+---------------+ + | *Enumeration* | all runs | 490 | + +-------------------------+-------------------------+---------------+ + | *Self* | runs 1-3 | 359 | + +-------------------------+-------------------------+---------------+ + | *Self* | run 4 | 480 | + +-------------------------+-------------------------+---------------+ + | *Bang* | only one run | 243 | + +-------------------------+-------------------------+---------------+ + | *Clips* | all runs | 325 | + +-------------------------+-------------------------+---------------+ + | *Retinotopy* | all “wedge” and “ring” | 165 | + | | runs | | + +-------------------------+-------------------------+---------------+ + | *Raiders* | runs 1 and 11 | 374 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | runs 2 and 12 | 297 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | runs 3 and 13 | 314 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 4 | 379 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 5 | 347 
| + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 6 | 346 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 7 | 350 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 8 | 353 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 9 | 281 | + +-------------------------+-------------------------+---------------+ + | *Raiders* | run 10 | 211 | + +-------------------------+-------------------------+---------------+ + | *Lyon MOTO* | all runs | 359 | + +-------------------------+-------------------------+---------------+ + | *Lyon MCSE* | all runs | 177 | + +-------------------------+-------------------------+---------------+ + | *Lyon MVEB* | all runs | 203 | + +-------------------------+-------------------------+---------------+ + | *Lyon MVIS* | all runs | 178 | + +-------------------------+-------------------------+---------------+ + | *Lyon LEC1* | all runs | 190 | + +-------------------------+-------------------------+---------------+ + | *Lyon LEC2* | all runs | 143 | + +-------------------------+-------------------------+---------------+ + | *Lyon AUDI* | all runs | 347 | + +-------------------------+-------------------------+---------------+ + | *Lyon VISU* | all runs | 173 | + +-------------------------+-------------------------+---------------+ + | *Real-Life Sounds* | all runs | 277 | + +-------------------------+-------------------------+---------------+ + | *Stanford Stop Signal* | all runs | 165 | + +-------------------------+-------------------------+---------------+ + | *Stanford Attention* | all runs | 175 | + +-------------------------+-------------------------+---------------+ + | *Stanford Two-by-Two* | all runs | 340 | + +-------------------------+-------------------------+---------------+ + | *Stanford Selective | all runs | 329 | + | Stop Signal* | | | + +-------------------------+-------------------------+---------------+ + | *Stanford Stroop* | all runs | 107 | + +-------------------------+-------------------------+---------------+ + | *Stanford Delay | all runs | 309 | + | Discounting* | | | + +-------------------------+-------------------------+---------------+ + | *Stanford Columbia | all runs | 240 | + | Cards* | | | + +-------------------------+-------------------------+---------------+ + | *Stanford Dot Patterns* | all runs | 369 | + +-------------------------+-------------------------+---------------+ + | *Stanford Ward and | all runs | 240 | + | Allport* | | | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 1 | 313 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 2 | 330 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 3 | 358 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 4 | 319 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 5 | 297 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 6 | 382 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 7 | 336 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | run 8 | 298 | + +-------------------------+-------------------------+---------------+ 
+ | *Le Petit Prince* | run 9 | 340 | + +-------------------------+-------------------------+---------------+ + | *Le Petit Prince* | localizer | 175 | + +-------------------------+-------------------------+---------------+ + | *Biological Motion* | all runs | 204 | + +-------------------------+-------------------------+---------------+ + | *Math-Language* | run 1 type “a” | 281 | + +-------------------------+-------------------------+---------------+ + | *Math-Language* | run 2 type “a” and run | 280 | + | | 3 type “b” | | + +-------------------------+-------------------------+---------------+ + | *Math-Language* | run 3 type “a” | 286 | + +-------------------------+-------------------------+---------------+ + | *Math-Language* | run 4 type “a” | 288 | + +-------------------------+-------------------------+---------------+ + | *Math-Language* | runs 1 and 2 type “b” | 283 | + +-------------------------+-------------------------+---------------+ + | *Spatial Navigation* | run 1 | 151 | + +-------------------------+-------------------------+---------------+ + | *Spatial Navigation* | runs 2-8 | 241 | + +-------------------------+-------------------------+---------------+ + | *The Good, the Bad and | runs 1 and 19 | 265 | + | the Ugly* | | | + +-------------------------+-------------------------+---------------+ + | *The Good, the Bad and | runs 2 and 20 | 244 | + | the Ugly* | | | + +-------------------------+-------------------------+---------------+ + | *The Good, the Bad and | runs 3-18 and 21 | 304 | + | the Ugly* | | | + +-------------------------+-------------------------+---------------+ + | *CamCAN Emotional | all runs | 306 | + | Memory* | | | + +-------------------------+-------------------------+---------------+ + | *CamCAN Emotion | all runs | 195 | + | Recognition* | | | + +-------------------------+-------------------------+---------------+ + | *CamCAN Stop/No-Go* | all runs | 304 | + +-------------------------+-------------------------+---------------+ + | *CamCAN Oddball* | all runs | 135 | + +-------------------------+-------------------------+---------------+ + | *CamCAN VSTM* | all runs | 254 | + +-------------------------+-------------------------+---------------+ + | *CamCAN Finger Tapping* | all runs | 163 | + +-------------------------+-------------------------+---------------+ + | *FBIRN Breath Holding* | all runs | 182 | + +-------------------------+-------------------------+---------------+ + | *FBIRN Checkerboard* | all runs | 190 | + +-------------------------+-------------------------+---------------+ + | *FBIRN Finger Tapping* | all runs | 236 | + +-------------------------+-------------------------+---------------+ + | *FBIRN Item | all runs | 222 | + | Recognition* | | | + +-------------------------+-------------------------+---------------+ + | *Visual Search and | run1 | 355 | + | Working Memory* | | | + +-------------------------+-------------------------+---------------+ + | *Visual Search and | run2 | 354 | + | Working Memory* | | | + +-------------------------+-------------------------+---------------+ + | *Visual Search and | run3 | 345 | + | Working Memory* | | | + +-------------------------+-------------------------+---------------+ + | *Visual Search and | run4 | 356 | + | Working Memory* | | | + +-------------------------+-------------------------+---------------+ + | *Reward Processing* | all runs | 362 | + +-------------------------+-------------------------+---------------+ + | *NARPS* | all runs | 222 | + 
+-------------------------+-------------------------+---------------+
+ | *Scene perception* | all runs | 284 |
+ +-------------------------+-------------------------+---------------+
+ | *Face-body* | all runs | 229 |
+ +-------------------------+-------------------------+---------------+
+ | *Monkey Kingdom* | runs 1 and 2 | 465 |
+ +-------------------------+-------------------------+---------------+
+ | *Monkey Kingdom* | runs 3 to 5 | 466 |
+ +-------------------------+-------------------------+---------------+
+ | *Color* | all runs | 221 |
+ +-------------------------+-------------------------+---------------+
+ | *Motion* | all runs | 198 |
+ +-------------------------+-------------------------+---------------+
+ | *Optimism Bias* | all runs | 302 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Movie 1 run | 331 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Face perception 2 runs | 188 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Gender stroop 2 runs | 246 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Emotion matching 2 runs | 121 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Working memory run 1 | 162 |
+ +-------------------------+-------------------------+---------------+
+ | *AOMIC* | Working memory run 2 | 181 |
+ +-------------------------+-------------------------+---------------+
\ No newline at end of file
diff --git a/_sources/mridata_organization.rst.txt b/_sources/mridata_organization.rst.txt
new file mode 100644
index 0000000..82900dc
--- /dev/null
+++ b/_sources/mridata_organization.rst.txt
@@ -0,0 +1,21 @@
+MRI-data organization
+=====================
+
+The tree structure of the IBC dataset follows the BIDS Specification
+(`http://bids.neuroimaging.io/ `__), as in the `figure below `__.
+
+- The identifiers of the 13 participants are "sub-01", "sub-02",
+ "sub-04", ..., "sub-15".
+
+- The acquisitions are organized in sessions ("ses-00", "ses-01", ..., "ses-20", etc.).
+
+- Within each session, data are divided according to modality: "anat", "dwi", "fmap", "func".
+
+- For each modality, files are stored in .nii.gz format, with a name that recapitulates subject, session and modality, together with meta-information stored in .tsv and .json files.
+
+.. _bids:
+
+.. figure:: ibc_bids.png
+ :alt: **Imaging modalities employed in each session.**
+
+ **Imaging modalities employed in each session.**
diff --git a/_sources/participants.rst.txt b/_sources/participants.rst.txt
new file mode 100644
index 0000000..be9a1cf
--- /dev/null
+++ b/_sources/participants.rst.txt
@@ -0,0 +1,37 @@
+Participants
+============
+
+The cohort of the IBC dataset consists of a permanent group of twelve
+adults with no psychiatric or neurological disorders and no particular
+psychometric profile. Participants are numbered from 1 to 15;
+participants 3 and 10 are not part of the group.
+
+`This table `__ contains demographic information on
+the participants. Data from *sub-02* were acquired only for the ARCHI
+tasks, the HCP tasks and the RSVP Language task; for these particular
+tasks, the cohort therefore exceptionally comprises thirteen
+participants. For further details about exclusion criteria and the
+experimental procedures concerned with the handling of the
+participants, please consult (Pinho et al. 2018).
+
+.. _demographics:
+
+.. table:: Demographic data of the participants.
Age is participant age at the time of recruitment. + + ========== =================== ==== === ================ + Subject ID Year of recruitment Age Sex Handedness score + ========== =================== ==== === ================ + *sub-01* 2015 39.5 M 0.3 + *sub-02* 2015 32.8 M 1 + *sub-04* 2015 26.9 M 0.8 + *sub-05* 2015 27.4 M 0.6 + *sub-06* 2015 33.1 M 0.7 + *sub-07* 2015 38.8 M 1 + *sub-08* 2015 36.5 F 1 + *sub-09* 2015 38.5 F 1 + *sub-11* 2016 35.8 M 1 + *sub-12* 2016 40.8 M 1 + *sub-13* 2016 28.2 M 0.6 + *sub-14* 2016 28.3 M 0.7 + *sub-15* 2017 30.3 M 0.9 + ========== =================== ==== === ================ \ No newline at end of file diff --git a/_sources/processing_pipelines.rst.txt b/_sources/processing_pipelines.rst.txt new file mode 100644 index 0000000..bb02461 --- /dev/null +++ b/_sources/processing_pipelines.rst.txt @@ -0,0 +1,97 @@ +fMRI processing pipelines +========================= + +fMRI preprocessing +------------------ + +Source data were preprocessed using *PyPreprocess*. This library offers +a collection of Python tools to facilitate pipeline runs, reporting and +quality check (https://github.com/neurospin/pypreprocess). It is built +upon the *Nipype* library (`Gorgolewski et al., 2011 `__) v0.12.1, that in +turn launched various commands used to process neuroimaging data. These +commands were taken from the *SPM12* software package (Wellcome +Department of Imaging Neuroscience, London, UK) v6685, and the *FSL* +library (Analysis Group, FMRIB, Oxford, UK) v5.0. + +All fMRI images, i.e. GE-EPI volumes, were collected twice with reversed +phase-encoding directions, resulting in pairs of images with distortions +going in opposite directions. Susceptibility-induced off-resonance field +was estimated from the two Spin-Echo EPI volumes in reversed +phase-encoding directions. The images were corrected based on the +estimated deformation model, using the *topup* tool (`Andersson, Skare, +and Ashburner 2003 `__) implemented in FSL (`Smith et al., 2004 `__). + +Further, the GE-EPI volumes were aligned to each other within each +participant. A rigid body transformation was employed, in which the +average volume of all images was used as reference (`Friston et al., +1995 `__). The mean EPI volume was also co-registered onto the corresponding +T1-weighted MPRAGE (anatomical) volume for every participant (`Ashburner +and Friston 1997 `__). The individual anatomical volumes were then segmented +into tissue types to finally allow for the normalization of both +anatomical and functional data (`Ashburner and Friston 2005 `__). Concretely, +the segmented volumes were used to compute the deformation field for +normalization to the standard MNI152 space. The deformation field was +then applied to the EPI data. In the end, all volumes were resampled to +their original resolution, i.e. 1 mm isotropic for the +T1-weighted MPRAGE images and 1.5 mm for the EPI images. + +.. _subsubsec:modelspec: + +Model specification +~~~~~~~~~~~~~~~~~~~ + +The fMRI data were analyzed using the *General Linear Model* (GLM). +Regressors of the model were designed to capture variations in BOLD +response strictly following stimulus timing specifications. They were +estimated through the convolution of temporal representations referring +to the task-conditions with the canonical *Hemodynamic Response +Function* (HRF), defined according to (`Friston, Fletcher, et al., +1998 `__) and (`Friston, Josephs, et al., 1998 `__). + +The temporal profile of the conditions was characterized by boxcar +functions. 
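+
+For illustration, the snippet below is a minimal sketch of how such a
+design matrix and run-level GLM could be set up with the current
+*Nilearn* interface (the successor of the *Nistats* module mentioned in
+this section). It is not the exact IBC pipeline: the repetition time,
+file names and event timings are purely illustrative.
+
+.. code-block:: python
+
+   import pandas as pd
+   from nilearn.glm.first_level import FirstLevelModel
+
+   # BIDS-style paradigm descriptors: onset, duration, trial_type triplets
+   # (illustrative values, not taken from an actual IBC log file)
+   events = pd.DataFrame({
+       "onset": [0.0, 12.0, 24.0],
+       "duration": [6.0, 6.0, 6.0],
+       "trial_type": ["video_sentence", "audio_sentence", "video_computation"],
+   })
+
+   # Hypothetical confounds table holding the six motion parameters
+   confounds = pd.read_csv("sub-01_task-archi_motion.tsv", sep="\t")
+
+   glm = FirstLevelModel(
+       t_r=2.0,                       # repetition time in seconds (illustrative)
+       hrf_model="spm + derivative",  # canonical HRF plus its time derivative
+       drift_model="cosine",          # discrete-cosine high-pass set
+       high_pass=1.0 / 128,           # 128-s cutoff
+       noise_model="ar1",             # first-order autoregressive noise model
+       smoothing_fwhm=None,           # or 5 for the smoothed analysis
+   )
+
+   # Fit a single run and compute a condition-vs-baseline contrast
+   glm = glm.fit("sub-01_task-archi_bold.nii.gz",
+                 events=events, confounds=confounds)
+   z_map = glm.compute_contrast("video_sentence", output_type="z_score")
+
+The same ``compute_contrast`` call also accepts expressions over the
+design-matrix columns, e.g. the difference between two conditions of
+interest, which matches the contrast definitions described in the
+model-estimation subsection below.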
To build such models, paradigm descriptors grouped in
+triplets (i.e. onset time, duration and trial type according to the BIDS
+Specification) were determined from the log-file records generated
+by the stimulus-delivery software.
+
+To account for small fluctuations in the latency of the HRF peak
+response, additional regressors were computed based on the convolution
+of the same task-conditions profile with the time derivative of the HRF.
+
+Nuisance regressors were also added to the design matrix in order to
+minimize the final residual error. To remove signal variance associated
+with spurious effects arising from movements, six temporal regressors
+were defined for the motion parameters. Further, the first five
+principal components of the signal, extracted from voxels showing the 5%
+highest variance, were also added as regressors to capture physiological
+noise (`Behzadi et al., 2007 `__).
+
+In addition, a discrete-cosine transform set was applied for high-pass filtering (cutoff = 128 seconds). Model specification was implemented using the *Nistats* library v0.0.1b, a Python module devoted to statistical analysis of fMRI data (https://nistats.github.io), which leverages *Nilearn* (`Abraham et al., 2014 `__), a Python library for statistical learning on neuroimaging data (https://nilearn.github.io/).
+
+.. _subsubsec:modelest:
+
+Model estimation
+~~~~~~~~~~~~~~~~
+
+In order to restrict GLM parameter estimation to voxels inside
+functional brain regions, a brain mask was extracted from the mean EPI
+volume. The procedure implemented in the Nilearn software simply
+thresholds the mean fMRI image of each subject in order to separate
+brain tissue from background, and then performs a morphological opening
+of the resulting image to remove spurious voxels.
+
+Regarding noise modeling, a first-order autoregressive model was used in
+the maximum likelihood estimation procedure.
+
+A mass-univariate GLM fit was applied separately to the preprocessed
+GE-EPI data of each run with respect to a specific task. Parameter
+estimates pertaining to the experimental conditions were thus computed,
+along with the respective covariance at every voxel. Various contrasts
+(linear combinations of the effects) were then defined, referring only
+to differences in evoked responses between either *(i)* two
+conditions-of-interest or *(ii)* one condition-of-interest and baseline.
+GLM estimation and subsequent statistical analyses were also implemented
+using Nistats v0.1. fMRI data analysis was first run on unsmoothed data
+and, afterwards, on data smoothed with a 5 mm full-width-at-half-maximum
+kernel. Such a procedure allows for an increased *Signal-to-Noise Ratio*
+(SNR) and facilitates between-image comparison.
diff --git a/_sources/references.rst.txt b/_sources/references.rst.txt
new file mode 100644
index 0000000..46b9067
--- /dev/null
+++ b/_sources/references.rst.txt
@@ -0,0 +1,778 @@
+References
+==============
+
+.. container:: references hanging-indent
+ :name: refs
+
+ .. container::
+ :name: ref-Abraham2014
+
+ Abraham, Alexandre, Fabian Pedregosa, Michael Eickenberg, Philippe
+ Gervais, Andreas Mueller, Jean Kossaifi, Alexandre Gramfort,
+ Bertrand Thirion, and Gaël Varoquaux. 2014. “Machine Learning for
+ Neuroimaging with Scikit-Learn.” *Front Neuroinform* 8: 14.
+ https://doi.org/10.3389/fninf.2014.00014.
+
+ .. container::
+ :name: ref-Amalric2016
+
+ Amalric, Marie, and Stanislas Dehaene. 2016.
“Origins of the Brain + Networks for Advanced Mathematics in Expert Mathematicians.” *Proc + Natl Acad Sci U S A* 113 (18): 4909–17. + https://doi.org/10.1073/pnas.1603205113. + + .. container:: + :name: ref-Andersson2003 + + Andersson, Jesper L. R., Stefan Skare, and John Ashburner. 2003. + “How to Correct Susceptibility Distortions in Spin-Echo + Echo-Planar Images: Application to Diffusion Tensor Imaging.” + *Neuroimage* 20 (2): 870–88. + http://doi.org/10.1016/S1053-8119(03)00336-7. + + .. container:: + :name: ref-Andersson2016 + + Andersson, Jesper LR, and Stamatios N Sotiropoulos. 2016. “An + Integrated Approach to Correction for Off-Resonance Effects and + Subject Movement in Diffusion Mr Imaging.” *Neuroimage* 125: + 1063–78. https://doi.org/10.1016/j.neuroimage.2015.10.019. + + .. container:: + :name: ref-Ashburner1997 + + Ashburner, J., and K. Friston. 1997. “Multimodal Image + Coregistration and Partitioning - A Unified Framework.” + *Neuroimage* 6 (3): 209–17. + https://doi.org/10.1006/nimg.1997.0290. + + .. container:: + :name: ref-Ashburner2005 + + Ashburner, John, and Karl J. Friston. 2005. “Unified + Segmentation.” *Neuroimage* 26 (3): 839–51. + https://doi.org/10.1016/j.neuroimage.2005.02.018. + + .. container:: + :name: ref-Avants2009 + + Avants, Brian B, Nick Tustison, Gang Song, and others. 2009. + “Advanced Normalization Tools (Ants).” *Insight J* 2 (365): 1–35. + https://psychiatry.ucsd.edu/research/programs-centers/snl/_files/ants2.pdf. + + .. container:: + :name: ref-Barch2013 + + Barch, Deanna M, Gregory C Burgess, Michael P Harms, Steven E + Petersen, Bradley L Schlaggar, Maurizio Corbetta, Matthew F + Glasser, et al. 2013. “Function in the Human Connectome: Task-FMRI + and Individual Differences in Behavior.” *Neuroimage* 80: 169–89. + https://doi.org/10.1016/j.neuroimage.2013.05.033. + + .. container:: + :name: ref-Behzadi2007 + + Yashar Behzadi and Khaled Restom and Joy Liau and Thomas T. Liu. 2007. “A component based noise correction method (CompCor) for BOLD and perfusion based fMRI.” *Neuroimage* 37 (1): 90–101. + https://doi.org/10.1016/j.neuroimage.2007.04.042. + + .. container:: + :name: ref-Bhattasali2019 + + Bhattasali, Shohini, Murielle Fabre, Wen-Ming Luh, Hazem Al Saied, + Mathieu Constant, Christophe Pallier, Jonathan R. Brennan, R. + Nathan Spreng, and John Hale. 2019. “Localising Memory Retrieval + and Syntactic Composition: An FMRI Study of Naturalistic Language + Comprehension.” *Lang Cogn Neurosci* 34 (4): 491–510. + https://doi.org/10.1080/23273798.2018.1518533. + + .. container:: + :name: ref-Binder2011 + + Binder, Jeffrey R., William L. Gross, Jane B. Allendorfer, + Leonardo Bonilha, Jessica Chapin, Jonathan C. Edwards, Thomas J. + Grabowski, et al. 2011. “Mapping Anterior Temporal Lobe Language + Areas with FMRI: A Multicenter Normative Study.” *Neuroimage* 54 + (2): 1465–75. http://doi.org/10.1016/j.neuroimage.2010.09.048. + + .. container:: + :name: ref-Bissett2011 + + Bissett, Patrick G, and Gordon D Logan. 2011. “Balancing Cognitive + Demands: Control Adjustments in the Stop-Signal Paradigm.” *J Exp + Psychol Learn Mem Cogn* 37 (2): 392. + https://doi.org/10.1037/a0021800. + + .. container:: + :name: ref-Botvinik2019 + + Botvinik-Nezer, Rotem, Roni Iwanir, Felix Holzmeister, Jürgen + Huber, Magnus Johannesson, Michael Kirchler, Anna Dreber, Colin F + Camerer, Russell A Poldrack, and Tom Schonberg. 2019. “FMRI Data + of Mixed Gambles from the Neuroimaging Analysis Replication and + Prediction Study.” *Sci Data.* 6 (1): 1–9. 
+ https://doi.org/10.1038/s41597-019-0113-7. + + .. container:: + :name: ref-Campbell2015 + + Campbell, Karen L., Meredith A. Shafto, Paul Wright, Kamen A. + Tsvetanov, Linda Geerligs, Rhodri Cusack, Lorraine K. Tyler, et + al. 2015. “Idiosyncratic Responding During Movie-Watching + Predicted by Age Differences in Attentional Control.” *Neurobiol + Aging* 36 (11): 3045–55. + https://doi.org/10.1016/j.neurobiolaging.2015.07.028. + + .. container:: + :name: ref-Carretie2019 + + Carretié, L., M. Tapia, S. López-Martín, and J. Albert. 2019. + “EmoMadrid: An emotional pictures database for affect research.” + *Motivation and Emotion* 43: 929–39. + + .. container:: + :name: ref-Chang2018 + + Chang, Dorita HF, Hiroshi Ban, Yuji Ikegaya, Ichiro Fujita, and + Nikolaus F Troje. 2018. “Cortical and Subcortical Responses to + Biological Motion.” *Neuroimage* 174: 87–96. + https://doi.org/10.1016/j.neuroimage.2018.03.013. + + .. container:: + :name: ref-Cordero2019 + + Cordero-Grande, Lucilio, Daan Christiaens, Jana Hutter, Anthony N + Price, and Jo V Hajnal. 2019. “Complex Diffusion-Weighted Image + Estimation via Matrix Recovery Under General Noise Models.” + *Neuroimage* 200: 391–404. + https://doi.org/10.1016/j.neuroimage.2019.06.039. + + .. container:: + :name: ref-Crone2018 + + Crone, D. L., S. Bode, C. Murawski, and S. M. Laham. 2018. “The + Socio-Moral Image Database (SMID): A Novel Stimulus Set for the + Study of Social, Moral and Affective Processes.” *PLoS One* 13 + (1). https://doi.org/10.1371/journal.pone.0190954. + + .. container:: + :name: ref-Danglauser2011 + + Dan-Glauser, E. S., and K. R. Scherer. 2011. “The Geneva Affective + Picture Database (GAPED): A New 730-Picture Database Focusing on + Valence and Normative Significance.” *Behavior Research Methods* + 43 (2): 468–77. http://doi.org/10.3758/s13428-011-0064-1. + + .. container:: + :name: ref-Dhollander2019 + + Dhollander, Thijs, Remika Mito, David Raffelt, and Alan Connelly. + 2019. “Improved White Matter Response Function Estimation for + 3-Tissue Constrained Spherical Deconvolution.” In *Proc. Intl. + Soc. Mag. Reson. Med*. Vol. 555. 10. + https://archive.ismrm.org/2019/0555.html. + + .. container:: + :name: ref-Diersch2021 + + Diersch, Nadine, Jose P Valdes-Herrera, Claus Tempelmann, and + Thomas Wolbers. 2021. “Increased Hippocampal Excitability and + Altered Learning Dynamics Mediate Cognitive Mapping Deficits in + Human Aging.” *Journal of Neuroscience* 41 (14): 3204–21. + https://doi.org/10.1523/JNEUROSCI.0528-20.2021. + + .. container:: + :name: ref-DodellFeder2011 + + Dodell-Feder, David, Jorie Koster-Hale, Marina Bedny, and Rebecca + Saxe. 2011. “FMRI Item Analysis in a Theory of Mind Task.” + *Neuroimage* 55 (2): 705–12. + https://doi.org/10.1016/j.neuroimage.2010.12.040. + + .. container:: + :name: ref-Douglas2017 + + Douglas, Danielle, Sathesan Thavabalasingam, Zahraa Chorghay, + Edward B. O’Neil, Morgan D. Barense, and and Andy C. H. Lee. 2017. + “Perception of Impossible Scenes Reveals Differential Hippocampal + and Parahippocampal Place Area Contributions to Spatial + Coherency.” *Hippocampus* 27 (1): 61–76. + https://doi.org/10.1002/hipo.22673. + + .. container:: + :name: ref-Eisenberg2017 + + Eisenberg, Ian W., Patrick G. Bissett, Jessica R. Canning, Jesse + Dallery, A. Zeynep Enkavi, Susan Whitfield-Gabrieli, Oscar + Gonzalez, et al. 2017. “Applying novel technologies and methods to + inform the ontology of self-regulation.” *Behaviour Research and + Therapy* 101: 46–57. 
https://doi.org/10.1016/j.brat.2017.09.014. + + .. container:: + :name: ref-Endo2003 + + Endo, Nobutaka, Jun Saiki, Yoko Nakao, and Hirofumi Saito. 2003. + “Perceptual Judgments of Novel Contour Shapes and Hierarchical + Descriptions of Geometrical Properties.” *Japanese Journal of + Psychology*. https://pubmed.ncbi.nlm.nih.gov/14708480/. + + .. container:: + :name: ref-Erb2018 + + Erb, Julia, Marcelo Armendariz, Federico De Martino, Rainer + Goebel, Wim Vanduffel, and Elia Formisano. 2018. “Homology and + Specificity of Natural Sound-Encoding in Human and Monkey Auditory + Cortex.” *Cerebral Cortex* 29 (9): 3636–50. + https://doi.org/10.1093/cercor/bhy243. + + .. container:: + :name: ref-Ericsson1995 + + Ericsson, K. Anders, and Walter Kintsch. 1995. “Long-Term Working + Memory.” *Psychol Rev* 102 (2): 211–45. + https://doi.org/10.1037/0033-295x.102.2.211. + + .. container:: + :name: ref-Eriksen1974 + + Eriksen, Barbara A, and Charles W Eriksen. 1974. “Effects of Noise + Letters Upon the Identification of a Target Letter in a Nonsearch + Task.” *Percept Psychophys* 16 (1): 143–49. + https://doi.org/10.3758/BF03203267. + + .. container:: + :name: ref-Favre2021 + + Favre, Pauline, Philipp Kanske, Haakon Engen, and Tania Singer. + 2021. “Decreased Emotional Reactivity After 3-Month + Socio-Affective but Not Attention- or Meta-Cognitive-Based Mental + Training: A Randomized, Controlled, Longitudinal FMRI Study.” + *NeuroImage* 237: 118–32. + https://doi.org/10.1016/j.neuroimage.2021.118132. + + .. container:: + :name: ref-Friston1998a + + Friston, K. J., P. Fletcher, O. Josephs, A. Holmes, M. D. Rugg, + and R. Turner. 1998. “Event-Related FMRI: Characterizing + Differential Responses.” *Neuroimage* 7 (1): 30–40. + http://doi.org/10.1006/nimg.1997.0306. + + .. container:: + :name: ref-Friston1995 + + Friston, K. J., C. D. Frith, R. S. J. Frackowiak, and R. Turner. + 1995. “Characterizing Dynamic Brain Responses with FMRI: A + Multivariate Approach.” *Neuroimage* 2 (2): 166–72. + https://doi.org/10.1006/nimg.1995.1019. + + .. container:: + :name: ref-Friston1998b + + Friston, K. J., O. Josephs, G. Rees, and R. Turner. 1998. + “Nonlinear Event-Related Responses in FMRI.” *Magn Reson Med* 39 + (1): 41–52. https://doi.org/10.1002/mrm.1910390109. + + .. container:: + :name: ref-Gauthier2018 + + Gauthier, Baptiste, Karin Pestke, and Virginie van Wassenhove. + 2018. “Building the Arrow of Time… Over Time: A Sequence of Brain + Activity Mapping Imagined Events in Time and Space.” *Cereb + Cortex* 29 (10): 4398–4414. https://doi.org/10.1093/cercor/bhy320. + + .. container:: + :name: ref-Gauthier2016a + + Gauthier, Baptiste, and Virginie van Wassenhove. 2016a. “Cognitive + Mapping in Mental Time Travel and Mental Space Navigation.” + *Cognition* 154: 55–68. + https://doi.org/10.1016/j.cognition.2016.05.015. + + .. container:: + :name: ref-Gauthier2016b + + ———. 2016b. “Time Is Not Space: Core Computations and + Domain-Specific Networks for Mental Travels.” *J Neurosci* 36 + (47): 11891–11903. https://doi.org/10.1523/JNEUROSCI.1400-16.2016. + + .. container:: + :name: ref-Genon2014 + + Genon, Sarah, Mohamed Ali Bahri, Fabienne Collette, Lucie Angel, + Arnaud d’Argembeau, David Clarys, Sandrine Kalenzaga, Eric Salmon, + and Christine Bastin. 2014. “Cognitive and Neuroimaging Evidence + of Impaired Interaction Between Self and Memory in Alzheimer’s + Disease.” *Cortex* 51: 11–24. + https://doi.org/10.1016/j.cortex.2013.06.009. + + .. 
container:: + :name: ref-Gorgolewski2011 + + Gorgolewski, Krzysztof, Christopher D. Burns, Cindee Madison, Dav + Clark, Yaroslav O. Halchenko, Michael L. Waskom, and Satrajit S. + Ghosh. 2011. “Nipype: A Flexible, Lightweight and Extensible + Neuroimaging Data Processing Framework in Python.” *Front + Neuroinform* 5: 13. http://doi.org/10.3389/fninf.2011.00013. + + .. container:: + :name: ref-Hale2022 + + Hale, John T., Luca Campanelli, Jixing Li, Shohini Bhattasali, + Christophe Pallier, and Jonathan R. Brennan. 2022. + “Neurocomputational Models of Language Processing.” *Annual Review + of Linguistics* 8 (1): null. + https://doi.org/10.1146/annurev-linguistics-051421-020803. + + .. container:: + :name: ref-Hamame2012 + + Hamamé, Carlos M, Juan R Vidal, Tomás Ossandón, Karim Jerbi, + Sarang S Dalal, Lorella Minotti, Olivier Bertrand, Philippe + Kahane, and Jean-Philippe Lachaux. 2012. “Reading the Mind’s Eye: + Online Detection of Visuo-Spatial Working Memory and Visual + Imagery in the Inferior Temporal Lobe.” *Neuroimage* 59 (1): + 872–79. https://doi.org/10.1016/j.neuroimage.2011.07.087. + + .. container:: + :name: ref-Hariri2002 + + Hariri, Ahmad R., Alessandro Tessitore, Venkata S. Mattay, + Francesco Fera, and Daniel R. Weinberger. 2002. “The Amygdala + Response to Emotional Stimuli: A Comparison of Faces and Scenes.” + *Neuroimage* 17 (1): 317–23. + http://doi.org/10.1006/nimg.2002.1179. + + .. container:: + :name: ref-Haxby2011 + + Haxby, James V., J. Swaroop Guntupalli, Andrew C. Connolly, + Yaroslav O. Halchenko, Bryan R. Conroy, M. Ida Gobbini, Michael + Hanke, and Peter J. Ramadge. 2011. “A Common, High-Dimensional + Model of the Representational Space in Human Ventral Temporal + Cortex.” *Neuron* 72 (2): 404–16. + http://doi.org/10.1016/j.neuron.2011.08.026. + + .. container:: + :name: ref-Helfrich2013 + + Helfrich, Randolph F, Hubertus GT Becker, and Thomas Haarmeier. + 2013. “Processing of Coherent Visual Motion in Topographically + Organized Visual Areas in Human Cerebral Cortex.” *Brain + Topography* 26 (2): 247–63. + https://doi.org/10.1007/s10548-012-0226-1. + + .. container:: + :name: ref-Humphries2006 + + Humphries, Colin, Jeffrey R. Binder, David A. Medler, and Einat + Liebenthal. 2006. “Syntactic and Semantic Modulation of Neural + Activity During Auditory Sentence Comprehension.” *J Cogn + Neurosci* 18 (4): 665–79. + http://doi.org/10.1162/jocn.2006.18.4.665. + + .. container:: + :name: ref-Huth2016a + + Huth, Alexander, Wendy de Heer, Thomas Griffiths, Frédéric + Theunissen, and Jack Gallant. 2016. “Natural Speech Reveals the + Semantic Maps That Tile Human Cerebral Cortex.” *Nature* 532 + (7600): 453–8. https://doi.org/10.1038/nature17637. + + .. container:: + :name: ref-Jacoby2016 + + Jacoby, Nir, Emile Bruneau, Jorie Koster-Hale, and Rebecca Saxe. + 2016. “Localizing Pain Matrix and Theory of Mind Networks with + Both Verbal and Non-Verbal Stimuli.” *Neuroimage* 126: 39–48. + https://doi.org/10.1016/j.neuroimage.2015.11.025. + + .. container:: + :name: ref-Johansson1973 + + Johansson, Gunnar. 1973. “Visual Perception of Biological Motion + and a Model for Its Analysis.” *Percept Psychophys* 14 (2): + 201–11. https://doi.org/10.3758/BF03212378. + + .. container:: + :name: ref-Keator2016 + + Keator, David B., Theo G. M. van Erp, Jessica A. Turner, Gary H. + Glover, Bryon A. Mueller, Thomas T. Liu, James T. Voyvodic, et al. + 2016. “The Function Biomedical Informatics Research Network Data + Repository.” *NeuroImage* 124: 1074–9. 
+ https://doi.org/10.1016/j.neuroimage.2015.09.003. + + .. container:: + :name: ref-King2019 + + King, Maedbh, Carlos R Hernandez-Castillo, Russell A Poldrack, + Richard B Ivry, and Jörn Diedrichsen. 2019. “Functional Boundaries + in the Human Cerebellum Revealed by a Multi-Domain Task Battery.” + *Nat Neurosci* 22 (8): 1371–8. + https://doi.org/10.1038/s41593-019-0436-x. + + .. container:: + :name: ref-Knops2014 + + Knops, André, Manuela Piazza, Rakesh Sengupta, Evelyn Eger, and + David Melcher. 2014. “A Shared, Flexible Neural Map Architecture + Reflects Capacity Limits in Both Visual Short-Term Memory and + Enumeration.” *J Neurosci* 34 (30): 9857–66. + https://doi.org/10.1523/JNEUROSCI.2758-13.2014. + + .. container:: + :name: ref-Kuo2016 + + Kuo, Bo-Cheng, Anna Christina Nobre, Gaia Scerif, and Duncan E + Astle. 2016. “Top–down Activation of Spatiotopic Sensory Codes in + Perceptual and Working Memory Search.” *Journal of Cognitive + Neuroscience* 28 (7): 996–1009. + https://www.researchgate.net/publication/297895192_Top-Down_Activation_of_Spatiotopic_Sensory_Codes_in_Perceptual_and_Working_Memory_Search. + + .. container:: + :name: ref-Lang2008 + + Lang, P. J., M. M. Bradley, and B. N. Cuthbert. 2008. + “International Affective Picture System (IAPS): Affective Ratings + of Pictures and Instruction Manual.” *University of Florida*. + + .. container:: + :name: ref-Lebreton2015 + + Lebreton, Maël, Raphaëlle Abitbol, Jean Daunizeau, and Mathias + Pessiglione. 2015. “Automatic Integration of Confidence in the + Brain Valuation Signal.” *Nat Neurosci* 18 (8): 1159–67. + https://doi.org/10.1038/nn.4064. + + .. container:: + :name: ref-Mantini2012 + + Mantini, D., U. Hasson, V. Betti, M. G. Perrucci, G. L. Romani, M. + Corbetta, G. A. Orban, and W. Vanduffel. 2012. “Interspecies + activity correlations reveal functional correspondence between + monkey and human brain areas.” *Nat Methods* 9 (3): 277–82. + https://doi.org/10.1038/nmeth.1868. + + .. container:: + :name: ref-McKeefry1997 + + McKeefry, DJ, and SEMIR Zeki. 1997. “The Position and Topography + of the Human Colour Centre as Revealed by Functional Magnetic + Resonance Imaging.” *Brain: A Journal of Neurology* 120 (12): + 2229–42. https://doi.org/10.1093/brain/120.12.2229. + + .. container:: + :name: ref-Morrison2017 + + Morrison, Danielle, Hongyi Wang, Amanda C Hahn, Benedict C Jones, + and Lisa M DeBruine. 2017. “Predicting the Reward Value of Faces + and Bodies from Social Perception.” *PloS One* 12 (9): e0185093. + https://doi.org/10.1371/journal.pone.0185093. + + .. container:: + :name: ref-Newell1972 + + Newell, Allen, and Herbert A. Simon. 1972. *Human Problem + Solving*. 1st ed. NJ: Prentice-Hall. + + .. container:: + :name: ref-Nishimoto2011 + + Nishimoto, Shinji, An Vu, Thomas Naselaris, Yuval Benjamini, B. + Yu, and Jack Gallant. 2011. “Reconstructing Visual Experiences + from Brain Activity Evoked by Natural Movies.” *Curr Biol* 21 + (September): 1641–6. https://doi.org/10.1016/j.cub.2011.08.031. + + .. container:: + :name: ref-ODoherty2003 + + O’Doherty, John, Hugo Critchley, Ralf Deichmann, and Raymond J + Dolan. 2003. “Dissociating Valence of Outcome from Behavioral + Control in Human Orbital and Ventral Prefrontal Cortices.” *J + Neurosci.* 23 (21): 7931–9. + https://doi.org/10.1523/JNEUROSCI.23-21-07931.2003. + + .. container:: + :name: ref-ODoherty2001 + + O’Doherty, John, Morten L Kringelbach, Edmund T Rolls, Julia + Hornak, and Caroline Andrews. 2001. 
“Abstract Reward and + Punishment Representations in the Human Orbitofrontal Cortex.” + *Nat Neurosci.* 4 (1): 95–102. https://doi.org/10.1038/82959. + + .. container:: + :name: ref-Ossandon2012 + + Ossandón, Tomas, Juan R Vidal, Carolina Ciumas, Karim Jerbi, + Carlos M Hamamé, Sarang S Dalal, Olivier Bertrand, Lorella + Minotti, Philippe Kahane, and Jean-Philippe Lachaux. 2012. + “Efficient ‘Pop-Out’ Visual Search Elicits Sustained Broadband + Gamma Activity in the Dorsal Attention Network.” *J Neurosci* 32 + (10): 3414–21. https://doi.org/10.1523/JNEUROSCI.6048-11.2012. + + .. container:: + :name: ref-Perrone2012 + + Perrone-Bertolotti, Marcela, Jan Kujala, Juan R Vidal, Carlos M + Hamame, Tomas Ossandon, Olivier Bertrand, Lorella Minotti, + Philippe Kahane, Karim Jerbi, and Jean-Philippe Lachaux. 2012. + “How Silent Is Silent Reading? Intracerebral Evidence for Top-down + Activation of Temporal Voice Areas During Reading.” *J Neurosci* + 32 (49): 17554–62. https://doi.org/10.1523/JNEUROSCI.2982-12.2012. + + .. container:: + :name: ref-Pinel2007 + + Pinel, Philippe, Bertrand Thirion, Sébastien Meriaux, Antoinette + Jobert, Julien Serres, Denis Le Bihan, Jean-Baptiste Poline, and + Stanislas Dehaene. 2007. “Fast Reproducible Identification and + Large-Scale Databasing of Individual Functional Cognitive + Networks.” *BMC Neurosci* 8: 91. + https://doi.org/10.1186/1471-2202-8-91. + + .. container:: + :name: ref-Pinho2024 + + Pinho, Ana Luísa, Hugo Richard, Ana Fernanda Ponce, + Michael Eickenberg, Alexis Amadon, Elvis Dohmatob, Isabelle Denghien, + et al. 2024. “Individual Brain Charting dataset extension, + third release for movie watching and retinotopy data” *Sci Data* 11: 590. + https://doi.org/10.1038/s41597-024-03390-1. + + .. container:: + :name: ref-Pinho2021 + + Pinho, Ana Luísa, Alexis Amadon, Murielle Fabre, Elvis Dohmatob, + Isabelle Denghien, Juan Jes{\'u}s Torre, Chantal Ginisty, et al. 2021. + “Subject-specific segregation of functional territories based on deep + phenotyping” *Hum Brain Mapp* 42(4): 841-870. + https://doi.org/10.1002/hbm.25189. + + .. container:: + :name: ref-Pinho2020 + + Pinho, Ana Luísa, Alexis Amadon, Baptiste Gauthier, Nicolas + Clairis, André Knops, Sarah Genon, Elvis Dohmatob, et al. 2020. + “Individual Brain Charting dataset extension, second release of + high-resolution fMRI data for cognitive mapping.” *Sci Data* 7 + (1). https://doi.org/10.1038/s41597-020-00670-4. + + .. container:: + :name: ref-Pinho2018 + + Pinho, Ana Luísa, Alexis Amadon, Torsten Ruest, Murielle Fabre, + Elvis Dohmatob, Isabelle Denghien, Chantal Ginisty, et al. 2018. + “Individual Brain Charting, a High-Resolution FMRI Dataset for + Cognitive Mapping.” *Sci Data* 5: 180105. + https://doi.org/10.1038/sdata.2018.105. + + .. container:: + :name: ref-Reggio1982 + + Reggio, Godfrey. 1982. “Koyaanisqatsi.” + https://www.koyaanisqatsi.org/films/koyaanisqatsi.php. + + .. container:: + :name: ref-Richardson2018 + + Richardson, Hilary, Grace Lisandrelli, Alexa Riobueno-Naylor, and + Rebecca Saxe. 2018. “Development of the Social Brain from Age + Three to Twelve Years.” *Nat Commun* 9 (1027). + https://doi.org/10.1038/s41467-018-03399-2. + + .. container:: + :name: ref-Saignavongs2017 + + Saignavongs, Mani, Carolina Ciumas, Mathilde Petton, Romain Bouet, + Sébastien Boulogne, Sylvain Rheims, David W Carmichael, + Jean-Philippe Lachaux, and Philippe Ryvlin. 2017. 
“Neural activity + elicited by a cognitive task can be detected in single-trials with + simultaneous intracerebral EEG-fMRI recordings.” *Int J Neural + Syst* 27 (01): 1750001. https://doi.org/10.1142/S0129065717500010. + + .. container:: + :name: ref-Santoro2017 + + Santoro, Roberta, Michelle Moerel, Federico De Martino, Giancarlo + Valente, Kamil Ugurbil, Essa Yacoub, and Elia Formisano. 2017. + “Reconstructing the Spectrotemporal Modulations of Real-Life + Sounds from FMRI Response Patterns.” *Proc Natl Acad Sci U S A* + 114 (18): 4799–4804. https://doi.org/10.1073/pnas.1617622114. + + .. container:: + :name: ref-Schaefer2018 + + Schaefer, Alexander, Ru Kong, Evan M Gordon, Timothy O Laumann, + Xi-Nian Zuo, Avram J Holmes, Simon B Eickhoff, and BT Thomas Yeo. + 2018. “Local-Global Parcellation of the Human Cerebral Cortex from + Intrinsic Functional Connectivity Mri.” *Cerebral Cortex* 28 (9): + 3095–3114. https://doi.org/10.1093/cercor/bhx179. + + .. container:: + :name: ref-Schneider2011 + + Schneider, Darryl W, and Gordon D Logan. 2011. “Task-switching + performance with 1: 1 and 2: 1 cue–task mappings: Not so different + after all.” *J Exp Psychol Learn Mem Cogn* 37 (2): 405. + https://doi.org/10.1037/a0021967. + + .. container:: + :name: ref-Sereno1995 + + Sereno, MI, AM Dale, JB Reppas, KK Kwong, JW Belliveau, TJ Brady, + BR Rosen, and RB Tootell. 1995. “Borders of Multiple Visual Areas + in Humans Revealed by Functional Magnetic Resonance Imaging.” + *Science* 268 (5212): 889–93. + https://doi.org/10.1126/science.7754376. + + .. container:: + :name: ref-Shafto2014 + + Shafto, Meredith A, Lorraine K Tyler, Marie Dixon, Jason R Taylor, + James B Rowe, Rhodri Cusack, Andrew J Calder, et al. 2014. “The + Cambridge Centre for Ageing and Neuroscience (Cam-Can) Study + Protocol: A Cross-Sectional, Lifespan, Multidisciplinary + Examination of Healthy Cognitive Ageing.” *BMC Neurology* 14 + (204). https://doi.org/10.1186/s12883-014-0204-1. + + .. container:: + :name: ref-Shallice1982 + + Shallice, Timothy. 1982. “Specific Impairments of Planning.” + *Philos Trans R Soc Lond B Biol Sci* 298 (1089): 199–209. + https://doi.org/10.1098/rstb.1982.0082. + + .. container:: + :name: ref-Sharot2007 + + Sharot, Tali, Alison M Riccardi, Candace M Raio, and Elizabeth A + Phelps. 2007. “Neural Mechanisms Mediating Optimism Bias.” + *Nature* 450 (7166): 102–5. https://doi.org/10.1038/nature06280. + + .. container:: + :name: ref-Smith2007 + + Smith, Rachelle, Kamyar Keramatian, and Kalina Christoff. 2007. + “Localizing the Rostrolateral Prefrontal Cortex at the Individual + Level.” *Neuroimage* 36 (4): 1387–96. + http://doi.org/10.1016/j.neuroimage.2007.04.032. + + .. container:: + :name: ref-Smith2015b + + Smith, Robert E, Jacques-Donald Tournier, Fernando Calamante, and + Alan Connelly. 2015. “SIFT2: Enabling Dense Quantitative + Assessment of Brain White Matter Connectivity Using Streamlines + Tractography.” *Neuroimage* 119: 338–51. + https://doi.org/10.1016/j.neuroimage.2015.06.092. + + .. container:: + :name: ref-Smith2004 + + Smith, Stephen, Mark Jenkinson, Mark Woolrich, Christian Beckmann, + Timothy E. J. Behrens, Heidi Johansen-Berg, Peter R. Bannister, et + al. 2004. “Advances in Functional and Structural {Mr} Image + Analysis and Implementation as {Fsl}.” *Neuroimage* 23, Supplement + 1: S208–S219. http://doi.org/10.1016/j.neuroimage.2004.07.051. + + .. 
container:: + :name: ref-Snoek2021 + + Snoek, Lukas, Maite M van der Miesen, Tinka Beemsterboer, Andries + van der Leij, Annemarie Eigenhuis, and H Steven Scholte. 2021. + “The Amsterdam Open Mri Collection, a Set of Multimodal Mri + Datasets for Individual Difference Analyses.” *Scientific Data* 8 + (1): 1–23. https://doi.org/10.1038/s41597-021-00870-6. + + .. container:: + :name: ref-Stigliani2015 + + Stigliani, Anthony, Kevin S Weiner, and Kalanit Grill-Spector. + 2015. “Temporal Processing Capacity in High-Level Visual Cortex Is + Domain Specific.” *J Neurosci.* 35 (36): 12412–24. + https://doi.org/10.1523/JNEUROSCI.4822-14.2015. + + .. container:: + :name: ref-Stroop1935 + + Stroop, J Ridley. 1935. “Studies of Interference in Serial Verbal + Reactions.” *J Exp Psychol* 18 (6): 643. + https://doi.org/10.1037/h0054651. + + .. container:: + :name: ref-Szymanska2015 + + Szymanska, M., J. Monnin, N. Noiret, G. Tio, L. Galdon, E. + Laurent, S. Nezelof, and L. Vulliez-Coady. 2015. “The Besançon + Affective Picture Set-Adolescents (the BAPS-Ado): Development and + validation.” *Psychiatry Res* 228 (3): 576–84. + https://doi.org/10.1016/j.psychres.2015.04.055. + + .. container:: + :name: ref-Tom2007 + + Tom, Sabrina M, Craig R Fox, Christopher Trepel, and Russell A + Poldrack. 2007. “The Neural Basis of Loss Aversion in + Decision-Making Under Risk.” *Science.* 315 (5811): 515–18. + https://doi.org/10.1126/science.1134239. + + .. container:: + :name: ref-Tournier2010 + + Tournier, J Donald, Fernando Calamante, Alan Connelly, and others. + 2010. “Improved Probabilistic Streamlines Tractography by 2nd + Order Integration over Fibre Orientation Distributions.” In + *Proceedings of the International Society for Magnetic Resonance + in Medicine*. Vol. 1670. Ismrm. + https://archive.ismrm.org/2010/1670.html. + + .. container:: + :name: ref-Tournier2019 + + Tournier, J-Donald, Robert Smith, David Raffelt, Rami Tabbara, + Thijs Dhollander, Maximilian Pietsch, Daan Christiaens, Ben + Jeurissen, Chun-Hung Yeh, and Alan Connelly. 2019. “MRtrix3: A + Fast, Flexible and Open Software Framework for Medical Image + Processing and Visualisation.” *Neuroimage* 202: 116137. + https://doi.org/10.1016/j.neuroimage.2019.116137. + + .. container:: + :name: ref-Schalk2011 + + Van Der Schalk, Job, Skyler T Hawk, Agneta H Fischer, and Bertjan + Doosje. 2011. “Moving Faces, Looking Places: Validation of the + Amsterdam Dynamic Facial Expression Set (Adfes).” *Emotion* 11 + (4): 907. https://doi.org/10.1037/a0023853. + + .. container:: + :name: ref-Veraart2016 + + Veraart, Jelle, Dmitry S Novikov, Daan Christiaens, Benjamin + Ades-Aron, Jan Sijbers, and Els Fieremans. 2016. “Denoising of + Diffusion Mri Using Random Matrix Theory.” *Neuroimage* 142: + 394–406. https://doi.org/10.1016/j.neuroimage.2016.08.016. + + .. container:: + :name: ref-Vidal2010 + + Vidal, Juan R, Tomás Ossandón, Karim Jerbi, Sarang S Dalal, + Lorella Minotti, Philippe Ryvlin, Philippe Kahane, and + Jean-Philippe Lachaux. 2010. “Category-Specific Visual Responses: + An Intracranial Study Comparing Gamma, Beta, Alpha, and ERP + Response Selectivity.” *Front Hum Neurosci* 4: 195. + https://doi.org/10.3389/fnhum.2010.00195. + + .. container:: + :name: ref-Ward1997 + + Ward, Geoff, and Alan Allport. 1997. “Planning and Problem Solving + Using the Five Disc Tower of London Task.” *Q J Exp Psychol + Section A* 50 (1): 49–78. https://doi.org/10.1080/713755681. + + .. container:: + :name: ref-Weierich2019 + + Weierich, M. R., O. Kleshchova, J. K. 
Rieder, and D. M. Reilly.
+ 2019. “The Complex Affective Scene Set (COMPASS): Solving the
+ social content problem in affective visual stimulus sets.”
+ *Psychology* 10 (10). https://doi.org/10.1525/collabra.256.
diff --git a/_sources/section8.rst.txt b/_sources/section8.rst.txt
new file mode 100644
index 0000000..d399aa2
--- /dev/null
+++ b/_sources/section8.rst.txt
@@ -0,0 +1,2 @@
+Acquisition table
+=================
\ No newline at end of file
diff --git a/_sources/tasks.rst.txt b/_sources/tasks.rst.txt
new file mode 100644
index 0000000..0d37993
--- /dev/null
+++ b/_sources/tasks.rst.txt
@@ -0,0 +1,5891 @@
+All tasks
+=========
+
+Apart from the MRI data, IBC is also a great resource for fMRI tasks. We have run over 80 different tasks, gathered from our fellow researchers in the community, that altogether probe a large variety of cognitive domains in the human brain. The following figure depicts how much of the human cerebral cortex we have covered with these experiments.
+
+The code and stimuli for all these tasks are openly available on the `individual-brain-charting/public_protocols `__ repository. Most of these were implemented with Python, MATLAB or Octave and are hence readily usable. However, some of them were originally implemented with proprietary software, which is what we used and have provided on the repo; you would still need access to that software to run those experiments.
+
+Below, you can find the paradigm descriptions, conditions and contrasts, as well as sample `stimulation videos `__, for each of these tasks. To help you look for relevant tasks, we have also tagged each of them with some of the broad :bdg-primary:`cognitive_domains` they intend to probe. These tags are based on the definitions from `Cognitive Atlas `__.
+
+
+ArchiStandard
+-------------
+
+.. container:: tags
+
+ :bdg-primary:`vertical_checkerboard` :bdg-primary:`visual_sentence_comprehension` :bdg-success:`auditory_word_recognition` :bdg-primary:`visual_orientation` :bdg-primary:`visual_attention`
+
+.. admonition:: Implemented using proprietary software
+ :class: seealso
+
+ - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.)
+ - Response device: In-house custom-made sticks featuring one top button, one to be used in each hand
+
+ - Audio device: MRConfon MKII
+
+ - :octicon:`video;1em;` `See demo `__
+
+The ARCHI tasks are a battery of localizers comprising a wide range of psychological domains. The **ArchiStandard** task, described in (`Pinel et al., 2007 `__), probes basic functions, such as button presses with the left or right hand, viewing horizontal and vertical checkerboards, reading and listening to short sentences, and mental computations (subtractions). Visual stimuli were displayed in four 250-ms epochs, separated by 100-ms intervals (i.e., 1.3s in total). Auditory stimuli were generated from a recorded male voice (i.e., a total of 1.6s for motor instructions, 1.2-1.7s for sentences, and 1.2-1.3s for subtraction). The auditory or visual stimuli were shown to the participants for passive viewing or button response in event-related paradigms. Informal inquiries undertaken after the MRI session confirmed that the experimental tasks were understood and followed correctly.
+
+The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__.
+
+.. dropdown:: Conditions for ArchiStandard
+ :name: condArchiStandard
+
+ ..
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - audio_computation + - Mental subtraction, indicated by auditory instruction + * - audio_sentence + - Listen to narrative sentences + * - horizontal_checkerboard + - Visualization of flashing horizontal checkerboards + * - vertical_checkerboard + - Visualization of flashing vertical checkerboards + * - video_computation + - Mental subtraction, indicated by visual instruction + * - video_left_button_press + - Left-hand three-times button press, indicated by visual instruction + * - video_right_button_press + - Right-hand three-times button press, indicated by visual instruction + * - video_sentence + - Read narrative sentences + +.. dropdown:: Contrasts for ArchiStandard + :name: contArchiStandard + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - audio_computation + - mental subtraction upon audio instruction + * - audio_left_button_press + - left hand button presses upon audio instructions + * - audio_right_button_press + - right hand button presses upon audio instructions + * - audio_sentence + - listen to narrative sentence + * - cognitive-motor + - narrative/computation vs. button presses + * - computation + - mental subtraction + * - computation-sentences + - mental subtraction vs. sentence reading + * - horizontal-vertical + - horizontal vs. vertical checkerboard + * - horizontal_checkerboard + - watch horizontal checkerboard + * - left-right_button_press + - left vs. right hand button press + * - listening-reading + - listening to sentence vs. reading a sentence + * - motor-cognitive + - button presses vs. narrative/computation + * - reading-checkerboard + - read sentence vs. checkerboard + * - reading-listening + - reading sentence vs. listening to sentence + * - right-left_button_press + - right vs. left hand button press + * - sentences + - read or listen to sentences + * - sentences-computation + - sentence reading vs. mental subtraction + * - vertical-horizontal + - vertical vs. horizontal checkerboard + * - vertical_checkerboard + - watch vertical checkerboard + * - video_computation + - mental subtraction upon video instruction + * - video_left_button_press + - left hand button presses upon video instructions + * - video_right_button_press + - right hand button presses upon video instructions + * - video_sentence + - read narrative sentence + +ArchiSpatial +------------ + +.. container:: tags + + :bdg-warning:`saccadic_eye_movement` :bdg-primary:`visual_orientation` :bdg-warning:`grasping` :bdg-light:`hand_chirality_recognition` :bdg-light:`hand_side_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Audio device: MRConfon MKII + + - :octicon:`video;1em;` `See demo `__ + +The ARCHI tasks are a battery of localizers comprising a wide range of psychological domains. **ArchiSpatial** includes the performance of (1) ocular saccade, (2) grasping and (3) orientation judgments on objects (the two different tasks were actually made on the same visual stimuli in order to characterize grasping-specific activity), (4) judging whether a hand photograph was the left or right hand or (5) was displaying the front or back. The same input stimuli were presented twice in order to characterize specific response to hand side judgment. 
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ArchiSpatial + :name: condArchiSpatial + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - object_grasp + - Mimicry of object grasping with right hand, in which the corresponding object was displayed on the screen + * - object_orientation + - Mimic orientation of rhombus, displayed as image background on the screen , using right hand along with fingers + * - rotation_hand + - Mental judgment on whether the hand displayed on the image is a left or a right hand + * - rotation_side + - Mental judgment on the palmar-dorsal direction of a hand displayed as visual stimulus + * - saccades + - Ocular movements were performed according to the displacement of a fixation cross from the center towards peripheral points in the image displayed + +.. dropdown:: Contrasts for ArchiSpatial + :name: contArchiSpatial + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - grasp-orientation + - object grasping vs. orientation reporting + * - hand-side + - left or right hand vs. hand palm or back + * - object_grasp + - object grasping + * - object_orientation + - image orientation reporting + * - rotation_hand + - left or right hand + * - rotation_side + - hand palm or back vs. fixation + * - saccades + - saccade vs. fixation + +ArchiSocial +----------- + +.. container:: tags + + :bdg-primary:`visual_sentence_comprehension` :bdg-light:`mentalization` :bdg-dark:`animacy_decision` :bdg-success:`auditory_sentence_recognition` :bdg-success:`voice_perception` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Audio device: MRConfon MKII + + - :octicon:`video;1em;` `See demo `__ + +The ARCHI tasks are a battery of localizers comprising a wide range of psychological domains. **ArchiSocial** relies on (1) the interpretation of short stories involving false beliefs or not, (2) observation of moving objects with or without a putative intention, and (3) listening to speech and non-speech sounds. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ArchiSocial + :name: condArchiSocial + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - false_belief_audio + - Interpret short stories (presented as auditory stimuli) through mental reply (no active response was involved), featuring a false-belief plot + * - false_belief_video + - Interpret short stories (presented as visual stimuli) through mental reply (no active response was involved), featuring a false-belief plot + * - mechanistic_audio + - Interpret short stories (presented as auditory stimuli) through mental reply (no active response was involved), featuring a cause-consequence plot + * - mechanistic_video + - Interpret short stories (presented as visual stimuli) through mental reply (no active response was involved), featuring a cause-consequence plot + * - non_speech_sound + - Listen passively to short samples of natural sounds + * - speech_sound + - Listen passively to short samples of human voices + * - triangle_mental + - Watch short movies of triangles, which exhibit a putative interaction + * - triangle_random + - Watch short movies of triangles, which exhibit a random movement + +.. dropdown:: Contrasts for ArchiSocial + :name: contArchiSocial + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - false_belief-mechanistic + - false-belief story or tale vs. mechanistic story or tale + * - false_belief-mechanistic_audio + - false-belief tale vs. mechanistic tale + * - false_belief-mechanistic_video + - false-belief story vs. mechanistic story + * - false_belief_audio + - false-belief tale + * - false_belief_video + - false-belief story + * - mechanistic_audio + - listening to a mechanistic tale + * - mechanistic_video + - reading a mechanistic story + * - non_speech_sound + - listen to natural sound + * - speech-non_speech + - listen to voice sound vs. natural sound + * - speech_sound + - listen to voice sound + * - triangle_mental + - mental motion of triangle + * - triangle_mental-random + - mental motion vs. random motion + * - triangle_random + - randomly drifting triangle + +ArchiEmotional +-------------- + +.. container:: tags + + :bdg-primary:`visual_representation` :bdg-primary:`visual_pattern_recognition` :bdg-primary:`visual_orientation` :bdg-danger:`emotional_expression` :bdg-primary:`visual_face_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Audio device: MRConfon MKII + + - :octicon:`video;1em;` `See demo `__ + +The ARCHI tasks are a battery of localizers comprising a wide range of psychological domains. **ArchiEmotional** includes (1) facial judgments of gender, and (2) trustworthiness plus expression based on complete portraits or photos of eyes' expressions. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ArchiEmotional + :name: condArchiEmotional + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - expression_control + - Mental assessment on the slope of a gray-scale grid image (obtained from scrambling an eyes' image) that may be tilted or not + * - expression_gender + - Gender evaluation of the presented human eye images + * - expression_intention + - Trustworthy evaluation of the presented human eye images + * - face_control + - Mental assessment on the slope of a gray-scale grid image (obtained from scrambling a face's image) that may be tilted or not + * - face_gender + - Gender evaluation of the presented human faces + * - face_trusty + - Trustworthy evaluation of the presented human faces + +.. dropdown:: Contrasts for ArchiEmotional + :name: contArchiEmotional + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - expression_control + - look at scrambled eyes image + * - expression_gender + - guess gender from eyes image + * - expression_gender-control + - guess the gender from eyes image vs. view scrambled image + * - expression_intention + - guess intention from eyes image + * - expression_intention-control + - guess intention from eyes image vs. view scrambled image + * - expression_intention-gender + - guess intention vs. gender from eyes image + * - face_control + - look at scrambled image + * - face_gender + - guess the gender from face image + * - face_gender-control + - guess the gender from face image + * - face_trusty + - assess face trustfulness + * - face_trusty-control + - assess face trustfulness vs. view scrambled image + * - face_trusty-gender + - assess face trustfulness vs. gender + * - trusty_and_intention-control + - assess face trustfulness or guess expression intention vs. scrambled image + * - trusty_and_intention-gender + - assess face trustfulness or guess expression intention vs. guess the gender + +HcpEmotion +---------- + +.. container:: tags + + :bdg-primary:`emotional_face_recognition` :bdg-primary:`visual_form_recognition` :bdg-light:`feature_comparison` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The main purpose of the **HCP Emotion** task was to capture neural activity arising from fear- or angry-response processes. To elicit stronger effects, affective facial expressions were used as visual stimuli due to their importance in adaptive social behavior (`Hariri et al., 2002 `__). The paradigm was thus composed by two categories of blocks: (1) the face block and (2) the shape block. All blocks consisted of a series of events, in which images with faces or shapes were displayed, respectively. There were always three faces/shapes per image; one face/shape was shown at the top and two faces/shapes were shown at the bottom. The participants were then asked to decide which face/shape at the bottom, i.e. left or right face/shape, matched the one displayed at the top, by pressing respectively the index or middle finger's button of the response box. The task was formed by twelve blocks per run, i.e. six face blocks and six shape blocks. The two block categories were alternately presented for each run. 
All blocks contained six trials and they were always initiated by a cue of three seconds. In turn, the trials included a visual-stimulus period of two seconds and a fixation-cross period of one second; the total duration of the trial was thus three seconds. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpEmotion + :name: condHcpEmotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - face + - Images with faces were displayed + * - shape + - Images with shapes were displayed + +.. dropdown:: Contrasts for HcpEmotion + :name: contHcpEmotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - face + - emotional face comparison + * - face-shape + - emotional face comparison vs. shape comparison + * - shape + - shape comparison + * - shape-face + - shape comparison vs. emotional face comparison
+ +HcpGambling +----------- + +.. container:: tags + + :bdg-dark:`reward_processing` :bdg-dark:`punishment_processing` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4)
+ +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Gambling** task was adapted from the incentive processing task-fMRI paradigm of the HCP and its aim was to localize brain structures that take part in the reward system, namely the basal ganglia complex. The paradigm included eight blocks and each block was composed of eight events. For every event, the participants were asked to play a game. The goal was to guess whether the next number to be displayed, which ranged from one to nine, would be more or less than five while a question mark was shown on the screen. The answer was given by pressing the index or middle finger's button of the response box, respectively. Feedback on the correct number was provided afterwards. There was an equal number of blocks in which the participants experienced either reward or loss for most of the events. Concretely, six out of the eight events within a block pertained to one of these two outcomes; the remaining events corresponded to the opposite or a neutral outcome, i.e. when the correct number was five. The task consisted of eight blocks per run, of which each half related to reward and loss experience, respectively. The order of the two block categories was pseudorandomized during a single run, but fixed for all participants. A fixation-cross period of fifteen seconds was displayed between blocks. All blocks contained eight trials. The trials included a question-mark visual stimulus lasting up to 1.5 seconds, a feedback period of one second and a fixation-cross period of one second; the total duration of the trial was then 3.5 seconds, approximately.
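+
+A back-of-the-envelope check of this timing is given below, purely for illustration; it is not part of the E-Prime protocol code, and it assumes that the fifteen-second fixation cross appears only between consecutive blocks.
+
+.. code-block:: python
+
+    # Illustrative timing check for HcpGambling, using only the durations quoted above.
+    question_mark = 1.5          # seconds, response window while the "?" is shown
+    feedback = 1.0               # seconds of feedback on the correct number
+    fixation = 1.0               # seconds of fixation cross closing the trial
+    trial = question_mark + feedback + fixation   # ~3.5 s per trial, as stated
+    block = 8 * trial                              # eight trials per block, ~28 s
+    run = 8 * block + 7 * 15                       # assumed: fixation only between blocks
+    print(trial, block, run)                       # 3.5, 28.0, 329.0 seconds (~5.5 min)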
+The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpGambling + :name: condHcpGambling + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - punishment + - The participant experiences loss + * - reward + - The participant experiences reward + +.. dropdown:: Contrasts for HcpGambling + :name: contHcpGambling + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - punishment + - negative gambling outcome + * - punishment-reward + - negative vs. positive gambling outcome + * - reward + - gambling with positive outcome + * - reward-punishment + - positive vs. negative gambling outcome
+ +HcpMotor +-------- + +.. container:: tags + + :bdg-warning:`response_execution` :bdg-warning:`left_hand_response_execution` :bdg-warning:`tongue_response_execution` :bdg-warning:`right_hand_response_execution` :bdg-warning:`response_selection` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.)
+ +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Motor** task was designed with the intent of extracting maps of gross motor topography, in particular motor skills associated with movements of the foot, hand and tongue. There were thus five categories of blocks with respect to motor tasks involving (1) the left foot, (2) the right foot, (3) the left hand, (4) the right hand, and (5) the tongue, respectively. The blocks always started with visual cues referring to which part of the body should be moved. The cues were then followed by a set of events, which were in turn indicated by flashing arrows on the screen. The events pertained to the corresponding movements performed by the participants. The task comprised five blocks per category, with a total of twenty blocks per run. The order of the block categories was pseudo-randomized during each run, but fixed for all participants. A fixation-dot period of fifteen seconds was inserted between some blocks. All blocks contained ten trials. Every trial included a cue of one second and a period of performance of twelve seconds. During the period of performance, arrows flashed ten times on the screen, as an indication of the number of movements that should be performed. The total duration of the trial was then thirteen seconds.
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpMotor + :name: condHcpMotor + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - cue + - Fixation dot + * - left_foot + - Visual cue indicating the left foot should be moved + * - left_hand + - Visual cue indicating the left hand should be moved + * - right_foot + - Visual cue indicating the right foot should be moved + * - right_hand + - Visual cue indicating the right hand should be moved + * - tongue + - Visual cue indicating the tongue should be moved + +.. dropdown:: Contrasts for HcpMotor + :name: contHcpMotor + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - cue + - motion cue + * - left_foot + - move left foot + * - left_foot-avg + - move left foot vs. right foot hands and tongue + * - left_hand + - move left hand + * - left_hand-avg + - move left hand vs.
right hand feet and tongue + * - right_foot + - move right foot + * - right_foot-avg + - move right foot vs. left foot hands and tongue + * - right_hand + - move right hand + * - right_hand-avg + - move right hand vs. left hand feet and tongue + * - tongue + - move tongue + * - tongue-avg + - move tongue vs. hands and feet + +HcpLanguage +----------- + +.. container:: tags + + :bdg-success:`auditory_sentence_recognition` :bdg-success:`auditory_arithmetic_processing` :bdg-secondary:`narrative_comprehension` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Language** task was used as a localizer of brain regions involved in semantic processing, with special focus on the anterior temporal lobe (ATL) (`Binder et al., 2011 `__). The paradigm comprised two categories of blocks: (1) story blocks, and (2) math blocks. The math block served as a control task in this context, since it was likely to address other brain regions during the attentional demands. Both type of blocks exhibited auditory stimuli in short epochs, which in turn finished with a final question followed by two possible answers. During story blocks, participants were presented with stories, whose question targeted their respective topics. Conversely, math blocks showed arithmetic problems for which the correct solution must be selected. The answer was provided after the two possible options were displayed, through pressing the corresponding button of the response box, i.e. the button for the index or middle finger of the response box for the first or second option, respectively. The difficulty levels of the problems, presented for both categories, were adjusted throughout the experiment, in order to keep the participants engaged in the task and, thus, assure accurate performances (`Binder et al., 2011 `__). The task was composed by eleven blocks per run. For the first run, six story blocks and five math blocks were interleaved, respectively. The reverse amount and order of blocks were used during the second run. The number of trials per block varied between one and four. Nevertheless, it was assured that both block categories matched their length of presentation at every run. There was a cue of two seconds in the beginning of each block, indicating its category. The duration of the trials within a block varied between ten and thirty seconds. Finally, the presentation of the auditory stimuli was always accompanied by the display of a fixation cross on the screen throughout the entire run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpLanguage + :name: condHcpLanguage + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - math + - Auditorily-cued mental addition + * - story + - Listening to tales + +.. dropdown:: Contrasts for HcpLanguage + :name: contHcpLanguage + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - math + - mental additions + * - math-story + - mental additions vs. 
listening to tale + * - story + - listening to tale + * - story-math + - listening to tale vs. mental additions + +HcpRelational +------------- + +.. container:: tags + + :bdg-primary:`visual_pattern_recognition` :bdg-light:`relational_comparison` :bdg-primary:`visual_form_recognition` :bdg-light:`feature_comparison` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Relational** task employed a relational matching-to-sample paradigm, featuring a second-order comparison of relations between two pairs of objects. It served primarily as a localizer of the rostrolateral prefrontal cortex, since relational matching mechanisms were shown to elicit activation on this region (`Smith et al., 2007 `__). Similarly to some previous tasks, two categories of blocks described the paradigm: (1) the relational-processing block, and (2) the control-matching block. All blocks were constituted by a set of events. In the relational-processing block, visual stimuli consisted of images representing two pairs of objects, in which one pair was placed at the top and the other one at the bottom of the image, respectively. Objects within a pair may differ in two dimensions: shape and texture. The participants had to identify whether the pair of objects from the top differed in a specific dimension and, subsequently, they were asked to determine whether the pair from the bottom changed along the same dimension. For the control block, one pair of objects was displayed at the top of the image and a single object at the bottom of the same image. In addition, a cue was shown in the middle of that image referring to one of the two possible dimensions. The participants had thus to indicate whether the object from the bottom was matching either of the two objects from the top, according to the dimension specified as a cue. If there was a match they had to press with the index finger on the corresponding button of the button box; otherwise, they had to press with the middle finger on the corresponding one. + +This task was formed by twelve blocks per run. Two groups of six blocks referred to the two block categories, respectively. Block categories were, in turn, interleaved for display within a run. A fixation-cross period of sixteen seconds was inserted between some blocks. All blocks contained six trials and they were always initiated by a cue of two seconds. The trials were described by a visual-stimulus plus response period followed by a fixation-cross period, lasting up to ten seconds. The duration of the former differed in agreement with the type of block, i.e. it lasted nine seconds and 7.6 seconds during the relational-processing block and control-matching block, respectively. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpRelational + :name: condHcpRelational + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - match + - Simple visual matching + * - relational + - Relational processing of visual objects + +.. 
dropdown:: Contrasts for HcpRelational + :name: contHcpRelational + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - match + - visual feature matching vs. fixation + * - relational + - relational comparison vs. fixation + * - relational-match + - relational comparison vs. matching
+ +HcpSocial +--------- + +.. container:: tags + + :bdg-dark:`animacy_decision` :bdg-light:`mentalization` :bdg-light:`animacy_perception` :bdg-warning:`motion_detection` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4)
+ +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Social** task was intended to provide evidence for task-specific activation in brain structures presumably implicated in social cognition. The paradigm included two categories of blocks, in which movies were presented during short epochs. The movies consisted of triangle-shaped clip art, moving in a predetermined fashion. Putative social interactions could be inferred from the movements in the block category carrying the effect of interest. In contrast, objects appeared to be moving randomly in the other category, i.e. the control-effect block. Participants were to decide whether the movements of the objects appeared to represent a social interaction (by pressing with the index finger on the corresponding button of the response box) or not (by pressing with the ring finger on the corresponding button of the response box); in case of uncertainty, they had to press with the middle finger. The task consisted of ten blocks per run. Each half of the blocks corresponded to one of the aforementioned block categories, whose order was pseudo-randomized for every run, but fixed for all participants. There was only one trial present per block. It consisted of a twenty-second period of video-clip presentation plus three seconds maximum of a response period, indicated by a momentary instruction on the screen. Thus, the total duration of a block was approximately twenty-three seconds. A fixation-cross period of fifteen seconds was always displayed between blocks.
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpSocial + :name: condHcpSocial + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - mental + - Watching a movie with mental motion + * - random + - Watching a movie with random motion + +.. dropdown:: Contrasts for HcpSocial + :name: contHcpSocial + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - mental + - mental motion vs. fixation + * - mental-random + - mental motion vs. random motion + * - random + - random motion vs. fixation
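+
+For a rough, illustrative check of the HcpSocial run length implied by the numbers above (this is not protocol code; it assumes the fifteen-second fixation cross is shown only between consecutive blocks and that the full three-second response window is always used):
+
+.. code-block:: python
+
+    # Illustrative timing check for HcpSocial, using only the durations quoted above.
+    clip = 20                    # seconds of video-clip presentation per block
+    response = 3                 # seconds, maximum response window
+    block = clip + response      # ~23 s per block, as stated in the text
+    n_blocks = 10
+    run = n_blocks * block + (n_blocks - 1) * 15   # assumed: fixation only between blocks
+    print(block, run)            # 23, 365 seconds (~6 min), as an upper bound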
+ +HcpWm +----- + +.. container:: tags + + :bdg-light:`tool_maintenance` :bdg-primary:`visual_place_recognition` :bdg-primary:`face_maintenance` :bdg-light:`updating` :bdg-primary:`visual_face_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4)
+ +The HCP tasks used herein were reproductions made in a subset of task-fMRI paradigms originally developed for the `Human Connectome Project `__ (`Barch et al., 2013 `__), but with minor changes. The **HCP Working Memory** task was adapted from the classical n-back task to serve as a functional localizer for the evaluation of working-memory (WM) capacity and related processes. The paradigm integrated two categories of blocks: (1) the "0-back" WM-task block, and (2) the "2-back" WM-task block. They were both equally presented within a run. A cue was always displayed at the beginning of each block, indicating its task-related type. Blocks were formed by a set of events, during which pictures of faces, places, tools or body parts were shown on the screen. One block was always dedicated to one specific category of pictures and the four categories were always presented at every run. At each event, the participants were to decide whether the image matched the reference or not, by pressing respectively on the index or middle finger's button of the response box. The task consisted of sixteen blocks per run, split into two block categories. In addition, there were four pairs of blocks per category, referring respectively to the four classes of pictures mentioned above. The order of the blocks, regardless of their category and corresponding class of pictures, was pseudo-randomized for every run, but fixed for all participants. A fixation-cross period of fifteen seconds was introduced between some blocks. All blocks contained ten trials, and they were always initiated by a cue of 2.5 seconds. Trials included in turn the presentation of a picture for two seconds and a very short fixation-cross period of half a second; the total duration of one trial was thus 2.5 seconds.
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HcpWm + :name: condHcpWm + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - 0back_body + - 0-back, pictures of body parts were displayed + * - 0back_face + - 0-back, pictures of faces were displayed + * - 0back_place + - 0-back, pictures of places were displayed + * - 0back_tools + - 0-back, pictures of tools were displayed + * - 2back_body + - 2-back, pictures of body parts were displayed + * - 2back_face + - 2-back, pictures of faces were displayed + * - 2back_place + - 2-back, pictures of places were displayed + * - 2back_tools + - 2-back, pictures of tools were displayed + +.. dropdown:: Contrasts for HcpWm + :name: contHcpWm + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - 0back-2back + - 0-back vs. 2-back + * - 0back_body + - body image 0-back task vs. fixation + * - 0back_face + - face image 0-back task vs. fixation + * - 0back_place + - place image 0-back task vs. fixation + * - 0back_tools + - tool image 0-back task vs. fixation + * - 2back-0back + - 2-back vs. 0-back + * - 2back_body + - body image 2-back task vs. fixation + * - 2back_face + - face image 2-back task vs. fixation + * - 2back_place + - place image 2-back task vs. fixation + * - 2back_tools + - tool image 2-back task vs.
fixation + * - body-avg + - body image versus face place tool image + * - face-avg + - face image versus body place tool image + * - place-avg + - place image versus face body tool image + * - tools-avg + - tool image versus face place body image + +RSVPLanguage +------------ + +.. container:: tags + + :bdg-secondary:`sentence_comprehension` :bdg-light:`recognition` :bdg-light:`string_maintenance` :bdg-secondary:`syntactic_parsing` :bdg-secondary:`combinatorial_semantics` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.7.0 (Python 2.7) + - Response device: In-house custom-made sticks featuring one-top button, each one to be used in each hand + + - Audio device: MRConfon MKII + + - :octicon:`video;1em;` `See demo `__ + +The **Rapid-Serial-Visual-Presentation** (RSVP) language task, adapted from (`Humphries et al., 2006 `__) study on syntactic and semantic processing in auditory sentence comprehension, targets similar modules in the context of reading. This adaptation allows for additional insights into visual word recognition, sublexical processing, and other aspects of active reading. The paradigm employs a block-design presentation strategy, with each block representing an epoch within a trial. These epochs correspond to different experimental conditions, involving the consecutive visual presentation of ten constituents composed by letters. All linguistic content elicited from the conditions except "consonant strings", such as grammar rules, lexicon and phonemes, were part of the french language. To ensure continuous engagement, participants were immediately prompted after each sentence to determine if the current constituent, or 'probe', belonged to the preceding sentence. They responded by pressing the left button for 'yes' and the right button for 'no'. + +Data were collected in a single session comprising six runs, each consisting of sixty trials. Within each run, ten trials were dedicated to each condition. Trial order was pseudo-randomized within and between runs, ensuring no repeated trials in a session. The presentation order of trials varied across participants. Each trial included several experimental stages: fixation cross display (2 seconds), brief blank screen (0.5 seconds), linguistic stimuli block (4 seconds), variable blank screen jitter (1-1.5 seconds), second fixation cross display (0.5 seconds), probe display (0.5 seconds), and response period (up to 2 seconds). This resulted in a total trial duration of ten seconds. Additionally, three extra seconds of blank screen preceded the first trial in every run. Opposite phase-encoding directions were applied during acquisition of each half of the total runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for RSVPLanguage + :name: condRSVPLanguage + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - complex + - Constituents, i.e. 
words formed syntactically and semantically congruent sentences with more than one clause (high sentence-structure complexity) + * - consonant_string + - Syntactically and semantically non-congruent sentences composed of non-vocable constituents + * - probe + - Presented word, for which one has to assess whether it was in the previously presented sequence or not + * - pseudoword_list + - Syntactically and semantically non-congruent sentences composed of non-lexical vocable constituents + * - read_jabberwocky + - Syntactically congruent sentences composed of non-lexical vocable constituents + * - simple + - Constituents, i.e. words formed syntactically and semantically congruent sentences of one single clause (low sentence-structure complexity) + * - word_list + - Syntactically non-congruent sentences but with semantic content
+ +.. dropdown:: Contrasts for RSVPLanguage + :name: contRSVPLanguage + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - complex + - read sentence with complex syntax vs. fixation + * - complex-consonant_string + - read complex sentence vs. consonant strings + * - complex-simple + - read sentence with complex vs. simple syntax + * - consonant_string + - read and encode consonant strings vs. fixation + * - jabberwocky + - read jabberwocky vs. fixation + * - jabberwocky-consonant_string + - read jabberwocky vs. consonant strings + * - jabberwocky-pseudo + - read jabberwocky vs. pseudowords + * - probe + - word probe + * - pseudo-consonant_string + - read pseudowords vs. consonant strings + * - pseudoword_list + - read pseudowords vs. fixation + * - sentence-consonant_string + - read sentence vs. consonant strings + * - sentence-jabberwocky + - read sentence vs. jabberwocky + * - sentence-pseudo + - read sentence vs. pseudowords + * - sentence-word + - read sentence vs. words + * - simple + - read sentence with simple syntax vs. fixation + * - simple-consonant_string + - read simple sentence vs. consonant strings + * - word-consonant_string + - read words vs. consonant strings + * - word-pseudo + - read words vs. pseudowords + * - word_list + - read words vs. fixation
+ +RestingState +------------ + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: not applicable
+ +Participants underwent two sessions, each consisting of two 15-minute runs of resting-state fMRI data. This resulted in a total of 1 hour of resting-state data per subject. Participants were instructed to remain still, keep their eyes open, and focus on a crosshair displayed on the screen. For more information on the acquisition parameters used for the resting-state data, refer to :ref:`resting`. + + +MTTWE +----- + +.. container:: tags + + :bdg-info:`memory_retrieval` :bdg-info:`spatial_working_memory` :bdg-light:`temporal_distance` :bdg-dark:`east_cardinal-direction_judgment` :bdg-light:`spatial_distance` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.7.0 / pygame 1.9.3 + - Response device: In-house custom-made sticks featuring one-top button, each one to be used in each hand + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +The **Mental Time Travel** (MTT) task battery was built on prior NeuroSpin studies focused on chronosthesia and mental space navigation (`Gauthier et al., 2016 `__, `Gauthier et al., 2016 `__, `Gauthier et al., 2018 `__).
These studies involved judging the ordinality of historical events via egocentric mapping. In contrast, our task assessed neural correlates for both mental time and space judgment using narratives and allocentric mapping. To eliminate subject-specific representations, we used fictional scenarios with fabricated stories and characters on different islands. + +Each island had two stories plotted in a two-dimensional mesh of nodes, each representing a specific action. The narratives were presented as audio to prevent graphical memory retrieval, and participants learned the stories chronographically, without taking visual notes. The stories of each island evolved both in time and in one single cardinal direction. The cardinal directions, cued in the MTTWE task, were West-East (WE). In addition, the stories of each island evolved spatially in opposite ways. So, the two stories plotted in the West-East island evolved across time from west to east and east to west, respectively. + +The task followed a block-design paradigm, featuring three audio stimulus conditions: (1) Reference, providing context for time or space judgment; (2) Cue, instructing the type of mental judgment to be made, i.e. “Before or After?” for the time judgment or “West or East?” for the space judgment; and (3) Event, the action to be judged. Each trial began with a two-second Reference followed by silence, then a two-second Cue with silence, and four Events presented for two seconds each, interspersed by a three-second Response condition. The total trial duration was 39 seconds. + +A black fixation cross was always on screen, participants were instructed to keep their eyes open. At the end of each trial, the cross briefly turned red, signaling the next trial. Participants responded by pressing left or right-hand buttons to indicate their judgments based on the Cue, either temporal or spatial. If the Cue hinted at time judgment, the participants were to judge whether the previous Event occurred before or after the Reference. If the Cue concerned with space judgment, participants were to judge whether the Event occurred west or east of the Reference. + +One data collection session consisted of three runs, each comprising twenty trials. Half of these trials focused on time navigation, and the other half on space navigation. Both types of navigation shared five different references, resulting in two trials with the same reference for each type of navigation. These two trials differed in the distance between the node of the Reference and the node of each Event, with 'close' referring to two consecutive nodes, and 'far' indicating two nodes interspersed by another node. Within trials, half of the Events related to past or western actions, and the other half to future or eastern actions with respect to the Reference. + +Trial order was shuffled within runs, ensuring each run featured a unique sequence of trials based on reference type (both in time and space) and cue. Given only two types of answers, events were randomized according to their correct answer within each trial. This randomized sequence was consistent across all participants for each run and is available in the task's `Github `__ repository. It's important to note that the sequence of trials for all runs is predetermined and provided as inputs for a specific session in the protocol. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. 
dropdown:: Conditions for MTTWE + :name: condMTTWE + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - we_after_event + - Action to be judged whether it takes place before or after this reference, that actually takes place after this reference, in the west-east island + * - we_all_event_response + - Motor responses performed after every event condition in the west-east island + * - we_all_space_cue + - Cue indicating a question about spatial orientation in the west-east island + * - we_all_time_cue + - Cue indicating a question about time orientation in the west-east island + * - we_average_reference + - Action in the story to serve as reference for the time or space judgment in the same trial in the west-east island + * - we_before_event + - Action to be judged whether it takes place before or after this reference, that actually takes place before this reference, in the west-east island + * - we_eastside_event + - Action to be judged whether it takes place west or east from this reference, that actually takes place east from this reference + * - we_westside_event + - Action to be judged whether it takes place west or east from this reference, that actually takes place west from this reference
+ +.. dropdown:: Contrasts for MTTWE + :name: contMTTWE + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - eastside-westside_event + - events occurring eastside vs. westside + * - we_after-before_event + - events occurring after vs. before in west-east island + * - we_after_event + - events occurring after vs. fixation in west-east island + * - we_all_event_response + - motor responses performed after every event condition in the west-east island + * - we_all_space-time_cue + - spatial vs. time cues in west-east island + * - we_all_space_cue + - spatial cue of the next event in west-east island + * - we_all_time-space_cue + - time vs. spatial cues in west-east island + * - we_all_time_cue + - time cue of the next event in west-east island + * - we_average_event + - figuring out the space or time of an event in west-east island + * - we_average_reference + - updating one's position in space and time in west-east island + * - we_before-after_event + - events occurring before vs. after in west-east island + * - we_before_event + - events occurring before vs. fixation in west-east island + * - we_eastside_event + - events occurring eastside vs. fixation + * - we_space-time_event + - event in space vs. event in time in west-east island + * - we_space_event + - figuring out the position of an event in west-east island + * - we_time-space_event + - event in time vs. event in space in west-east island + * - we_time_event + - figuring out the time of an event in west-east island + * - we_westside_event + - events occurring westside vs. fixation + * - westside-eastside_event + - events occurring westside vs. eastside
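+
+The 39-second trial of both MTT tasks can be decomposed from the durations given above; the sketch below is only illustrative, and it assumes that one three-second response period follows each of the four events, the remainder being the silent gaps after the Reference and the Cue (their exact durations are not stated here).
+
+.. code-block:: python
+
+    # Illustrative decomposition of one MTT trial (39 s total); not protocol code.
+    reference = 2                     # s, audio Reference
+    cue = 2                           # s, "Before or After?" / "West or East?"
+    events = 4 * 2                    # s, four two-second Events
+    responses = 4 * 3                 # s, assuming one 3 s Response after each Event
+    stimulus_time = reference + cue + events + responses   # 24 s
+    silent_gaps = 39 - stimulus_time                        # 15 s left for the silences
+    print(stimulus_time, silent_gaps)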
+ +MTTNS +----- + +.. container:: tags + + :bdg-info:`memory_retrieval` :bdg-info:`spatial_working_memory` :bdg-light:`temporal_distance` :bdg-light:`spatial_distance` :bdg-dark:`north_cardinal-direction_judgment` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.7.0 / pygame 1.9.4 + - Response device: In-house custom-made sticks featuring one-top button, each one to be used in each hand + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__
+ +The **Mental Time Travel** (MTT) task battery was developed following previous studies conducted at the NeuroSpin platform on chronosthesia and mental space navigation (`Gauthier et al., 2016 `__, `Gauthier et al., 2016 `__, `Gauthier et al., 2018 `__). The MTTNS task is exactly the same as the `MTTWE`_ task except that the cardinal directions, cued in the task, were North-South (NS). In addition, the two stories plotted in the South-North island evolved across time from north to south and south to north. The MTTNS task was performed in a separate session from the `MTTWE`_ task.
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MTTNS + :name: condMTTNS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - sn_after_event + - Action to be judged whether it takes place before or after this reference, that actually takes place after this reference, in the south-north island + * - sn_all_event_response + - Motor responses performed after every event condition in the south-north island + * - sn_all_space_cue + - Cue indicating a question about spatial orientation in the south-north island + * - sn_all_time_cue + - Cue indicating a question about time orientation in the south-north island + * - sn_average_reference + - Action in the story to serve as reference for the time or space judgment in the same trial in the south-north island + * - sn_before_event + - Action to be judged whether it takes place before or after this reference, that actually takes place before this reference, in the south-north island + * - sn_northside_event + - Action to be judged whether it takes place south or north from this reference, that actually takes place north from this reference + * - sn_southside_event + - Action to be judged whether it takes place south or north from this reference, that actually takes place south from this reference
+ +.. dropdown:: Contrasts for MTTNS + :name: contMTTNS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - northside-southside_event + - events occurring northside vs. southside + * - sn_after-before_event + - events occurring after vs. before in south-north island + * - sn_after_event + - events occurring after vs. fixation in south-north island + * - sn_all_event_response + - motor responses performed after every event condition in the south-north island + * - sn_all_space-time_cue + - spatial vs. time cues in south-north island + * - sn_all_space_cue + - spatial cue of the next event in south-north island + * - sn_all_time-space_cue + - time vs. spatial cues in south-north island + * - sn_all_time_cue + - time cue of the next event in south-north island + * - sn_average_event + - figuring out the space or time of an event in south-north island + * - sn_average_reference + - updating one's position in space and time in south-north island + * - sn_before-after_event + - events occurring before vs. after in south-north island + * - sn_before_event + - events occurring before vs. fixation in south-north island + * - sn_northside_event + - events occurring northside vs.
fixation + * - sn_southside_event + - events occuring southside vs. fixation + * - sn_space-time_event + - event in space vs. event in time in south-north island + * - sn_space_event + - figuring out the position of an event in south-north island + * - sn_time-space_event + - event in time vs. event in space in south-north island + * - sn_time_event + - figuring out the time of an event in south-north island + * - southside-northside_event + - events occuring southside vs. northside + +PreferenceFood +-------------- + +.. container:: tags + + :bdg-dark:`reward_valuation` :bdg-dark:`judgment` :bdg-dark:`confidence_judgment` :bdg-light:`food_cue_reactivity` :bdg-dark:`incentive_salience` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Preference task battery was adapted from the Pleasantness Rating task described in (`Lebreton et al., 2015 `__), in order to capture the neural correlates underlying the decision-making for potentially rewarding outcomes (aka "positive-incentive value") as well as the level of confidence of such type of action. The whole task battery is composed of four tasks, each of them pertaining to the presentation of items of a certain kind. Therefore, PreferenceFood task was dedicated to "food items". The task was organized as a block-design experiment with one condition per trial. Every trial started with a fixation cross, whose duration was jittered between 0.5 seconds and 4.5 seconds, after which a picture of an item was displayed on the screen together with a rating scale and a cursor. Participants were to indicate how pleasant the presented stimulus was, by sliding the cursor along the scale. Index and ring finger's of the response box were to move respectively with low and high speed to the left whereas the middle and little fingers were to move respectively with low and high speed to the right; thumb's button was used to validate the answer. The scale ranged between 1 and 100. The value 1 corresponded to the choices "unpleasant" or "indifferent"; the middle of the scale corresponded to the choice "pleasant"; and the value 100 corresponded to the choice "very pleasant". Therefore, the ratings related only to the estimation of the positive-incentive value of the items displayed. + +The task was presented twice in two fully dedicated runs. The stimuli were always different between runs of the same task. As a consequence, no stimulus was ever repeated in any trial and, thus, no item was ever assessed more than once by the participants. Although each trial had a variable duration, according to the time spent by the participant in the assessment, no run lasted longer than eight minutes and sixteen seconds. To avoid any selection bias in the sequence of stimuli, the order of their presentation was shuffled across trials and between runs of the same type. This shuffle is embedded in the code of the protocol and, thus, the sequence was determined upon launching it. Consequently, the sequence of stimuli was also random across subjects. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for PreferenceFood + :name: condPreferenceFood + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - food_constant + - Classify the level of pleasantness of a food item displayed on the screen in terms of willingness to eat it, this condition serves as an occurrence regressor when formulated as visual evaluation of an item vs. fixation + * - food_linear + - Classify the level of pleasantness of a food item displayed on the screen in terms of willingness to eat it. this condition captures the linear effect of pleasantness (akin to judgement effects) when formulated as visual preference vs. no preference + * - food_quadratic + - Classify the level of pleasantness of a food item displayed on the screen in terms of willingness to eat it. this condition captures the quadratic effect of pleasantness (akin to confidence effects) when formulated as confidence in preference vs. no confidence + +.. dropdown:: Contrasts for PreferenceFood + :name: contPreferenceFood + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - food_constant + - evaluation of food + * - food_linear + - linear effect of food preference + * - food_quadratic + - quadratic effect of food preference + +PreferencePaintings +------------------- + +.. container:: tags + + :bdg-dark:`reward_valuation` :bdg-primary:`visual_form_discrimination` :bdg-dark:`judgment` :bdg-dark:`confidence_judgment` :bdg-primary:`visual_color_discrimination` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +The Preference task battery was adapted from the Pleasantness Rating task described in (`Lebreton et al., 2015 `__), in order to capture the neural correlates underlying the decision-making for potentially rewarding outcomes (aka "positive-incentive value") as well as the level of confidence of such type of action. The whole task battery is composed of four tasks, each of them pertaining to the presentation of items of a certain kind. Therefore, PreferencePaintings task was dedicated to "paintings". The task was organized as a block-design experiment with one condition per trial. Every trial started with a fixation cross, whose duration was jittered between 0.5 seconds and 4.5 seconds, after which a picture of an item was displayed on the screen together with a rating scale and a cursor. Participants were to indicate how pleasant the presented stimulus was, by sliding the cursor along the scale. Index and ring finger's of the response box were to move respectively with low and high speed to the left whereas the middle and little fingers were to move respectively with low and high speed to the right; thumb's button was used to validate the answer. The scale ranged between 1 and 100. The value 1 corresponded to the choices "unpleasant" or "indifferent"; the middle of the scale corresponded to the choice "pleasant"; and the value 100 corresponded to the choice "very pleasant". Therefore, the ratings related only to the estimation of the positive-incentive value of the items displayed. + +The task was presented twice in two fully dedicated runs. The stimuli were always different between runs of the same task. As a consequence, no stimulus was ever repeated in any trial and, thus, no item was ever assessed more than once by the participants. 
Although each trial had a variable duration, according to the time spent by the participant in the assessment, no run lasted longer than eight minutes and sixteen seconds. To avoid any selection bias in the sequence of stimuli, the order of their presentation was shuffled across trials and between runs of the same type. This shuffle is embedded in the code of the protocol and, thus, the sequence was determined upon launching it. Consequently, the sequence of stimuli was also random across subjects. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for PreferencePaintings + :name: condPreferencePaintings + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - painting_constant + - Classify the level of pleasantness of a painting displayed on the screen in terms of willingness to possess it, this condition serves as an occurrenceregressor when formulated as visual evaluation of an item vs. fixation + * - painting_linear + - Classify the level of pleasantness of a painting displayed on the screen in terms of willingness to possess it. this condition captures the linear effect of pleasantness (akin to judgement effects) when formulated as visual preference vs. no preference + * - painting_quadratic + - Classify the level of pleasantness of a painting displayed on the screen in terms of willingness to possess it. this condition captures the quadratic effect of pleasantness (akin to confidence effects) when formulated as confidence in preference vs. no confidence + +.. dropdown:: Contrasts for PreferencePaintings + :name: contPreferencePaintings + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - painting_constant + - evaluation of paintings + * - painting_linear + - linear effect of paintings preference + * - painting_quadratic + - quadratic effect of paintings preference + +PreferenceFaces +--------------- + +.. container:: tags + + :bdg-dark:`reward_valuation` :bdg-primary:`face_perception` :bdg-dark:`judgment` :bdg-dark:`confidence_judgment` :bdg-primary:`visual_face_recognition` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Preference task battery was adapted from the Pleasantness Rating task described in (`Lebreton et al., 2015 `__), in order to capture the neural correlates underlying the decision-making for potentially rewarding outcomes (aka "positive-incentive value") as well as the level of confidence of such type of action. The whole task battery is composed of four tasks, each of them pertaining to the presentation of items of a certain kind. Therefore, PreferenceFaces task was dedicated to "human faces". All tasks were organized as a block-design experiment with one condition per trial. Every trial started with a fixation cross, whose duration was jittered between 0.5 seconds and 4.5 seconds, after which a picture of an item was displayed on the screen together with a rating scale and a cursor. Participants were to indicate how pleasant the presented stimulus was, by sliding the cursor along the scale. 
Index and ring finger's of the response box were to move respectively with low and high speed to the left whereas the middle and little fingers were to move respectively with low and high speed to the right; thumb's button was used to validate the answer. The scale ranged between 1 and 100. The value 1 corresponded to the choices "unpleasant" or "indifferent"; the middle of the scale corresponded to the choice "pleasant"; and the value 100 corresponded to the choice "very pleasant". Therefore, the ratings related only to the estimation of the positive-incentive value of the items displayed. + +The task was presented twice in two fully dedicated runs. The stimuli were always different between runs of the same task. As a consequence, no stimulus was ever repeated in any trial and, thus, no item was ever assessed more than once by the participants. Although each trial had a variable duration, according to the time spent by the participant in the assessment, no run lasted longer than eight minutes and sixteen seconds. To avoid any selection bias in the sequence of stimuli, the order of their presentation was shuffled across trials and between runs of the same type. This shuffle is embedded in the code of the protocol and, thus, the sequence was determined upon launching it. Consequently, the sequence of stimuli was also random across subjects. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for PreferenceFaces + :name: condPreferenceFaces + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - face_constant + - Classify the level of pleasantness of a human face displayed on the screen in terms of willingness to meet the person portrayed, this condition serves as an occurrence regressor when formulated as visual evaluation of an item vs. fixation + * - face_linear + - Classify the level of pleasantness of a human face displayed on the screen in terms of willingness to meet the person portrayed. this condition captures the linear effect of pleasantness (akin to judgement effects) when formulated as visual preference vs. no preference + * - face_quadratic + - Classify the level of pleasantness of a human face displayed on the screen in terms of willingness to meet the person portrayed. this condition captures the quadratic effect of pleasantness (akin to confidence effects) when formulated as confidence in preference vs. no confidence + +.. dropdown:: Contrasts for PreferenceFaces + :name: contPreferenceFaces + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - face_constant + - evaluation of faces + * - face_linear + - linear effect of face preference + * - face_quadratic + - quadratic effect of face preference + +PreferenceHouses +---------------- + +.. container:: tags + + :bdg-dark:`reward_valuation` :bdg-dark:`judgment` :bdg-dark:`confidence_judgment` :bdg-primary:`visual_place_recognition` :bdg-dark:`incentive_salience` + +.. 
admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Preference task battery was adapted from the Pleasantness Rating task described in (`Lebreton et al., 2015 `__), in order to capture the neural correlates underlying the decision-making for potentially rewarding outcomes (aka "positive-incentive value") as well as the level of confidence of such type of action. The whole task battery is composed of four tasks, each of them pertaining to the presentation of items of a certain kind. Therefore, PreferenceHouses task was dedicated to "houses". All tasks were organized as a block-design experiment with one condition per trial. Every trial started with a fixation cross, whose duration was jittered between 0.5 seconds and 4.5 seconds, after which a picture of an item was displayed on the screen together with a rating scale and a cursor. Participants were to indicate how pleasant the presented stimulus was, by sliding the cursor along the scale. Index and ring finger's of the response box were to move respectively with low and high speed to the left whereas the middle and little fingers were to move respectively with low and high speed to the right; thumb's button was used to validate the answer. The scale ranged between 1 and 100. The value 1 corresponded to the choices "unpleasant" or "indifferent"; the middle of the scale corresponded to the choice "pleasant"; and the value 100 corresponded to the choice "very pleasant". Therefore, the ratings related only to the estimation of the positive-incentive value of the items displayed. + +The task was presented twice in two fully dedicated runs. The stimuli were always different between runs of the same task. As a consequence, no stimulus was ever repeated in any trial and, thus, no item was ever assessed more than once by the participants. Although each trial had a variable duration, according to the time spent by the participant in the assessment, no run lasted longer than eight minutes and sixteen seconds. To avoid any selection bias in the sequence of stimuli, the order of their presentation was shuffled across trials and between runs of the same type. This shuffle is embedded in the code of the protocol and, thus, the sequence was determined upon launching it. Consequently, the sequence of stimuli was also random across subjects. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for PreferenceHouses + :name: condPreferenceHouses + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - house_constant + - Classify the level of pleasantness of a house displayed on the screen in terms of willingness to live in that house. this condition serves as an occurrenceregressor when formulated as visual evaluation of an item vs. fixation + * - house_linear + - Classify the level of pleasantness of a house displayed on the screen in terms of willingness to live in that house. this condition captures the linear effect of pleasantness (akin to judgement effects) when formulated as visual preference vs. no preference + * - house_quadratic + - Classify the level of pleasantness of a house displayed on the screen in terms of willingness to live in that house. 
This condition captures the quadratic effect of pleasantness (akin to confidence effects) when formulated as confidence in preference vs. no confidence + +.. dropdown:: Contrasts for PreferenceHouses + :name: contPreferenceHouses + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - house_constant + - evaluation of houses + * - house_linear + - linear effect of house preference + * - house_quadratic + - quadratic effect of house preference + +TheoryOfMind +------------ + +.. container:: tags + + :bdg-light:`theory_of_mind` :bdg-secondary:`semantic_processing` :bdg-secondary:`narrative_comprehension` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +This battery of tasks was adapted from the original task-fMRI localizers of the `Saxe Lab `__, which were intended to identify functional regions-of-interest in the Theory-of-Mind network and Pain Matrix regions. Minor changes were employed in the present versions of the tasks herein described. Because the cohort of this dataset is composed solely of native French speakers, the verbal stimuli were translated into French. Therefore, the durations of the reading period and the response period within conditions were slightly increased. The **Theory Of Mind** task was a localizer intended to identify brain regions involved in theory-of-mind and social cognition, by contrasting activation during two distinct story conditions: belief judgments, reading a false-belief story that portrayed characters with false beliefs about their own reality; and fact judgments, reading a story about a false photograph, map or sign (`Dodell-Feder et al., 2011 `__). The task was organized as a block-design experiment with one condition per trial. Every trial started with a fixation cross of twelve seconds, followed by the main condition that comprised a reading period of eighteen seconds and a response period of six seconds. During this response period, participants were to judge whether a statement about the story previously displayed was true or false, by pressing the corresponding button of the response box with the index or middle finger, respectively. The total duration of the trial amounted to thirty-six seconds. There were ten trials in a run, followed by an extra period of fixation cross for twelve seconds at the end of the run. Two runs were dedicated to this task in one single session. The designs, i.e. the sequence of conditions across trials, for two possible runs were pre-determined by the authors of the original study and hard-coded in the original protocol. The IBC-adapted protocols contain exactly the same designs. For all subjects, design 1 was employed for the PA-run and design 2 for the AP-run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for TheoryOfMind + :name: condTheoryOfMind + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - belief + - Read a false-belief story + * - photo + - Read a false-photograph story + +.. dropdown:: Contrasts for TheoryOfMind + :name: contTheoryOfMind + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - belief + - manipulation of belief judgments + * - belief-photo + - belief vs. factual judgments + * - photo + - manipulation of fact judgments + +EmotionalPain +------------- + +.. container:: tags + + :bdg-danger:`imagined_physical_pain` :bdg-danger:`imagined_emotional_pain` :bdg-danger:`empathy` :bdg-secondary:`narrative_comprehension` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of the `Saxe Lab `__ task-fMRI localizers, which aimed to identify functional regions of interest in the Theory-of-Mind network and Pain Matrix regions. **Emotional Pain** was an emotional pain localizer intended to identify brain regions involved in theory-of-mind and Pain Matrix areas, by contrasting activation during two distinct story conditions: reading stories that portrayed characters suffering either from emotional pain or from physical pain (`Jacoby et al., 2016 `__). The experimental design of this task is identical to the one employed for the `TheoryOfMind`_ localizer, except that the reading period lasted twelve seconds instead of eighteen seconds. During the response period, the participant had to judge the amount of pain experienced by the character(s) portrayed in the previous story. For no pain, they had to press with their thumb on the corresponding button of the response box; for mild pain, they had to press with their index finger; for moderate pain, they had to press with the middle finger; and for strong pain, they had to press with the ring finger. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for EmotionalPain + :name: condEmotionalPain + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - emotional_pain + - Read story about fictional characters suffering from emotional pain + * - physical_pain + - Read story about fictional characters suffering from physical pain + +.. dropdown:: Contrasts for EmotionalPain + :name: contEmotionalPain + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - emotional-physical_pain + - emotional vs. physical pain story + * - emotional_pain + - reading emotional pain story + * - physical_pain + - reading physical pain story + +PainMovie +--------- + +.. container:: tags + + :bdg-light:`mentalization` :bdg-danger:`imagined_emotional_pain` :bdg-danger:`imagined_physical_pain` :bdg-light:`theory_of_mind` :bdg-danger:`empathy` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Audio device: MRConfon MKII + +This task is part of the `Saxe Lab `__ task-fMRI localizers, which aimed to identify functional regions of interest in the Theory-of-Mind network and Pain Matrix regions. The **Pain Movie** task was a pain movie localizer that consisted of displaying "Partly Cloudy", a six-minute movie from Disney Pixar, in order to study the responses implicated in theory-of-mind and Pain Matrix brain regions (`Jacoby et al., 2016 `__, `Richardson et al., 2018 `__). Two main conditions were thus hand-coded in the movie, according to (`Richardson et al. 
`__), as follows: mental movie, in which characters were "mentalizing"; and physical pain movie, in which characters were experiencing physical pain. Such conditions were intended to evoke brain responses from theory-of-mind and pain-matrix networks, respectively. All moments in the movie not focused on the direct interaction of the main characters were considered as a baseline period. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for PainMovie + :name: condPainMovie + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - movie_mental + - Watch movie-scene wherein characters experience changes in beliefs, desires, and/or emotions + * - movie_pain + - Watch movie-scene wherein characters experience physical pain + +.. dropdown:: Contrasts for PainMovie + :name: contPainMovie + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - movie_mental + - movie with events about changes in beliefs desires and emotions + * - movie_mental-pain + - mental events vs. physically painful events + * - movie_pain + - movie with physically painful events + +VSTM +---- + +.. container:: tags + + :bdg-primary:`visual_orientation` :bdg-info:`short-term_memory` :bdg-primary:`visual_form_discrimination` :bdg-primary:`visual_buffer` :bdg-info:`visual_working_memory` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +This battery of tasks was adapted from the control experiment described in (`Knops et al., 2014 `__). Minor changes were employed for the IBC implementation of this battery, which are described later in this section. In the **Visual Short-Term Memory** (VSTM) task, participants were presented with a certain number of bars, varying from one to six. Every trial started with the presentation of a black fixation dot in the center of the screen for 0.5 seconds. The black fixation dot then remained on the screen while a set of tilted bars - varying between trials from one to six - was displayed for 0.15 seconds. Afterwards, a white fixation dot was shown for 1 second. It was next replaced by the presentation of the test stimulus for 1.7 seconds, displaying an identical number of tilted bars in identical positions together with a green fixation dot. The participants were to remember the orientation of the bars from the previous sample and answer with one of the two possible button presses, i.e. with the index or middle finger, respectively, depending on whether one of the bars in the current display had changed orientation by 90° or not, which was the case in half of the trials. The test display was replaced by another black fixation dot for a fixed duration of 3.8 seconds. Thus, the trial was 7.15 seconds long. There were seventy-two trials in a run and four runs in one single session. Pairs of runs were launched consecutively. To avoid selection bias in the sequence of stimuli, the order of the trials was shuffled according to numerosity and change of orientation within runs and across participants. Both the response period and the period of the fixation dot at the end of each trial were made constant. 
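The trial arithmetic above can be made explicit. The following is a minimal sketch, not part of the released protocol code, that restates the phase durations given in this paragraph and reconstructs a within-run event timeline; the back-to-back spacing of trials and all variable names are assumptions made purely for illustration.

.. code-block:: python

    # Phase durations (in seconds) of one VSTM trial, as stated above.
    phases = [
        ("black_fixation", 0.5),
        ("sample_bars", 0.15),
        ("white_fixation", 1.0),
        ("test_display", 1.7),
        ("end_fixation", 3.8),
    ]
    trial_duration = sum(duration for _, duration in phases)  # 7.15 s

    # Hypothetical timeline for the seventy-two trials of a run, assuming
    # they follow each other back to back; any extra gap would shift onsets.
    events = []
    for trial in range(72):
        onset = trial * trial_duration
        for name, duration in phases:
            events.append({"trial": trial, "phase": name,
                           "onset": round(onset, 2), "duration": duration})
            onset += duration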
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for VSTM + :name: condVSTM + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - vstm_constant + - Judge whether any bar changed orientation within two consecutive displays of bar sets on the screen, response to numerosity vs. fixation + * - vstm_linear + - Judge whether any bar changed orientation within two consecutive displays of bar sets on the screen, linear response to numerosity + * - vstm_quadratic + - Judge whether any bar changed orientation within two consecutive displays of bar sets on the screen, response to quadratic numerosity effect + +.. dropdown:: Contrasts for VSTM + :name: contVSTM + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - vstm_constant + - visual orientation + * - vstm_linear + - linear effect of numerosity in visual orientation + * - vstm_quadratic + - quadratic effect of numerosity in visual orientation + +Enumeration +----------- + +.. container:: tags + + :bdg-light:`enumeration` :bdg-primary:`visual_buffer` :bdg-info:`visual_working_memory` :bdg-primary:`shape_recognition` :bdg-light:`numerosity` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +The Enumeration task was also part of the battery of tasks adapted from the control experiment described in (`Knops et al., 2014 `__). Minor changes were employed for the IBC implementation of this battery, which are described later in this section. In this task, participants were presented with a certain number of tilted dark-gray bars on a light-gray background, varying from one to eight. Every trial started with the presentation of a black fixation dot in the center of the screen for 0.5 seconds. The black fixation dot then remained on the screen while a certain number of tilted bars was displayed for 0.15 seconds. It was followed by a response period of 1.7 seconds, during which only a green fixation dot was displayed on the screen. The participants were to remember the number of the bars that were shown right before and answer accordingly, by pressing the corresponding button: once with the thumb's button for one bar; once with the index finger's button for two bars; once with the middle finger's button for three bars; once with the ring finger's button for four bars; twice with the thumb's button for five bars; twice with the index finger's button for six bars; twice with the middle finger's button for seven bars; twice with the ring finger's button for eight bars. Afterwards, another black fixation dot was displayed for a fixed duration of 7.8 seconds. The trial length was thus 9.95 seconds. There were ninety-six trials in a run and two (consecutive) runs in one single session. To avoid selection bias in the sequence of stimuli, the order of the trials was shuffled according to numerosity within runs and across participants. Both the response period and the period of the fixation dot at the end of each trial were made constant. The answers were registered via a button-press response box instead of an audio registration of oral responses as in the original study. 
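The ``*_constant``, ``*_linear`` and ``*_quadratic`` conditions of the VSTM and Enumeration tasks are parametric regressors built from the same trial onsets. The sketch below shows one plausible way to construct them with nilearn's ``modulation`` column, using made-up onsets and numerosities; the exact centering, orthogonalization and event durations used for the released IBC contrasts may differ.

.. code-block:: python

    import numpy as np
    import pandas as pd
    from nilearn.glm.first_level import make_first_level_design_matrix

    rng = np.random.default_rng(0)
    onsets = np.arange(24) * 7.15                      # illustrative trial onsets
    numerosity = rng.integers(1, 7, size=onsets.size)  # 1 to 6 bars per trial
    centered = numerosity - numerosity.mean()

    # Same onsets, three regressors: occurrence, linear and quadratic
    # effects of numerosity (one plausible reading of the condition names).
    events = pd.concat([
        pd.DataFrame({"onset": onsets, "duration": 0.15,
                      "trial_type": "vstm_constant", "modulation": 1.0}),
        pd.DataFrame({"onset": onsets, "duration": 0.15,
                      "trial_type": "vstm_linear", "modulation": centered}),
        pd.DataFrame({"onset": onsets, "duration": 0.15,
                      "trial_type": "vstm_quadratic", "modulation": centered ** 2}),
    ])

    frame_times = np.arange(0, 180, 2.0)  # assuming a 2 s repetition time
    design = make_first_level_design_matrix(frame_times, events, hrf_model="glover")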
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Enumeration + :name: condEnumeration + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - enumeration_constant + - Occurrence regressor for the enumeration response to constant numerosity when compared against fixation + * - enumeration_linear + - Capture the linear effect of enumeration response to numerosity + * - enumeration_quadratic + - Capture the quadratic effect of enumeration response to numerosity + +.. dropdown:: Contrasts for Enumeration + :name: contEnumeration + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - enumeration_constant + - enumeration + * - enumeration_linear + - linear effect of numerosity in enumeration + * - enumeration_quadratic + - quadratic effect of numerosity in enumeration + +Self +---- + +.. container:: tags + + :bdg-light:`self-reference_effect` :bdg-light:`recognition` :bdg-info:`episodic_memory` :bdg-dark:`judgment` :bdg-secondary:`reading` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.7.0 (Python 2.7) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +The Self task was adapted from (`Genom et al., 2014 `__), originally developed to investigate the Self-Reference Effect in older adults. This effect pertains to the encoding mechanism of information referring to the self, characterized as a memory-advantaged process. Consequently, memory-retrieval performance is also better for information encoded in reference to the self than to other people, objects or concepts. The present task was thus composed of two phases, relying respectively on encoding and recognition procedures. The encoding phase was intended to map brain regions related to the encoding of items in reference to the self, whereas the recognition one was conceived to isolate the memory network specifically involved in the retrieval of those items. The phases were interspersed, so that the recognition phase was always related to the encoding phase presented immediately before. The encoding phase had two blocks. Each block was composed of a set of trials pertaining to the same condition. For both conditions, a different adjective was presented at every trial on the screen. The participants were to judge whether or not the adjective described themselves -- *self-reference encoding* condition -- or another person -- *other-reference encoding* condition -- by pressing with the index finger on the corresponding button of the response box for "yes" and with the middle finger for "no". The other person was a public figure in France around the same age range as the cohort, whose gender matched the gender of each participant. + +Two public figures were mentioned, one at a time, across all runs; four public figures -- two of each gender -- were selected beforehand. In this way, we ensured that all participants were able to successfully characterize the same individuals, keeping the levels of familiarity and affective attributes toward these individuals comparable. 
In the recognition phase, participants were to remember whether or not the adjectives had also been displayed during the previous encoding phase, by pressing with the index finger on the corresponding button of the response box for "yes" and with the middle finger for "no". This phase was composed of a single block of trials, pertaining to three categories of conditions. *New* adjectives were presented during one half of the trials, whereas the other half referred to the adjectives displayed in the previous phase. Thus, trials referring to the adjectives from *self-reference encoding* were part of the *self-reference recognition* category and trials referring to the *other-reference encoding* were part of the *other-reference recognition* category. + +There were four runs in one session. The first three runs had three phases; the fourth and last run had four phases. Their total durations were twelve and 15.97 minutes, respectively. Blocks of both phases started with an *instruction* condition of five seconds, containing a visual cue. The cue was related to the judgment that should be performed next, according to the type of condition featured in that block. A set of trials, showing different adjectives, was presented afterwards. Each trial had a duration of five seconds, in which a response was to be provided by the participant. During the trials of the encoding blocks, participants had to press the button with their left or right hand, depending on whether or not they believed the adjective on display described the person in question (i.e. self or other, for the *self-reference encoding* or *other-reference encoding* conditions, respectively). During the trials of the recognition block, participants had to answer in the same way, depending on whether or not they believed the adjective had been presented before. A fixation cross, whose duration was jittered between 0.3 seconds and 0.5 seconds, was always presented between trials. A rest period, whose duration was also jittered between ten and fourteen seconds, was introduced between the encoding and recognition phases. Long intervals between these two phases, i.e. longer than ten seconds, ensured the measurement of long-term memory processes during the recognition phase, at the age range of the cohort (`Newell et al., 1972 `__, `Ericsson et al., 1995 `__). Fixation-cross periods of three and fifteen seconds were also introduced at the beginning and end of each run, respectively. Lastly, all adjectives were presented in the lexical form corresponding to the gender of the participant. There were also two sets of adjectives. One set was presented as new adjectives during the recognition phase and the other set was used for all remaining conditions of both phases. + +To avoid cognitive bias across the cohort, the two sets were switched for half of the participants. Moreover, adjectives were never repeated across runs, but their sequence was fixed for a given run and across participants assigned to the same set. Finally, the pseudo-randomization of the trials for the recognition phase was pre-determined by the authors of the original study, according to their category (i.e. *self-reference recognition*, *other-reference recognition* or *new*), such that no more than three consecutive trials of the same category were presented within a block. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Self + :name: condSelf + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - instructions + - Presentation of a question related to the succeeding block + * - memory + - Successful identification with an overt response that a new adjective has never been presented before + * - no_recognition + - Unsuccessful identification with an overt response that a new adjective has been presented before + * - other-reference_encoding + - Judge with an overt response whether or not a certain adjective, displayed on the screen, qualifies someone else + * - other-reference_recognition + - Successful recognition with an overt response of an adjective, displayed on the screen, as having been already presented during one "other-reference encoding" trial of the preceding encoding phase + * - self-reference_encoding + - Judge with an overt response whether or not a certain adjective, displayed on the screen, qualifies oneself + * - self-reference_recognition + - Successful recognition with an overt response of an adjective, displayed on the screen, as having been already presented during one "self-reference encoding" trial of the preceding encoding phase + +.. dropdown:: Contrasts for Self + :name: contSelf + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - correct_rejection + - identification of a new adjective + * - encode_other + - encoding of adjectives processed with other-reference + * - encode_self + - encoding of adjectives processed with self-reference + * - encode_self-other + - self-reference effect + * - false_alarm + - erroneous response + * - instructions + - read instruction in form of a question + * - recognition_hit + - recognition of adjectives previously displayed + * - recognition_hit-correct_rejection + - recognition of an adjective previously displayed + * - recognition_other_hit + - recognition of adjectives previously displayed with other-reference + * - recognition_self-other + - memory retrieval of encoded information with self-reference + * - recognition_self_hit + - recognition of adjectives previously displayed with self-reference + +Bang +---- + +.. container:: tags + + :bdg-light:`action_perception` :bdg-light:`audiovisual_perception` :bdg-success:`speech_processing` :bdg-secondary:`language_processing` :bdg-success:`speech_perception` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 2.7) + - Audio device: MagnaCoil (Magnacoustics) + +The Bang task was adapted from the study (`Campbell et al., 2015 `__), dedicated to investigating aging effects on neural responsiveness during naturalistic viewing. The task relies on watching - both viewing and listening to - an edited version of the episode "Bang! You're Dead" from the TV series "Alfred Hitchcock Presents". The original black-and-white, 25-minute episode was condensed to seven minutes and fifty-five seconds while preserving its narrative. The plot of the final movie includes scenes with characters talking to each other as well as scenes with no verbal communication. This task was performed during a single run in one unique session. Participants were never informed of the title of the movie before the end of the session. Ten seconds of acquisition were added at the end of the run. The total duration of the run was thus eight minutes and five seconds. + +**Note:** We used the MagnaCoil (Magnacoustics) audio device for all subjects except for *subject-08*, for whom we employed MRConfon MKII. 
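Throughout this documentation, the condition names listed for each task also serve as regressor names, so the tabulated contrasts are weighted combinations of those regressors. Below is a minimal, hypothetical sketch for the speech vs. non-speech contrast of this task with nilearn, assuming BIDS-style events files whose ``trial_type`` column uses the condition names ("talk", "no_talk"); the file names, smoothing and the 2 s repetition time are placeholders to adapt to the actual data.

.. code-block:: python

    import pandas as pd
    from nilearn.glm.first_level import FirstLevelModel

    # Placeholder paths; substitute a preprocessed BOLD run and its events file.
    bold = "sub-01_task-Bang_bold.nii.gz"
    events = pd.read_csv("sub-01_task-Bang_events.tsv", sep="\t")

    model = FirstLevelModel(t_r=2.0, hrf_model="glover", smoothing_fwhm=5.0)
    model = model.fit(bold, events=events)

    # "talk" and "no_talk" become design-matrix columns, so the contrast in
    # the table below is simply their difference.
    z_map = model.compute_contrast("talk - no_talk", output_type="z_score")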
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Bang + :name: condBang + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - no_talk + - Watch contiguous scenes with no speech + * - talk + - Watch contiguous scenes with speech + +.. dropdown:: Contrasts for Bang + :name: contBang + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - no_talk + - non-speech section in movie watching + * - talk + - speech sections in movie watching + * - talk-no_talk + - speech vs. non-speech sections in movie watching + +Clips +----- + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: Python 2.7 + - Audio device: MRConfon MKII + + - :octicon:`video;1em;` `See demo `__ + +The Clips battery is an adaptation of (`Nishimoto et al., 2011 `__), in which participants were to view naturalistic scenes edited as video clips of ten and a half minutes each. Each run was always dedicated to the data collection of one video clip at a time. As in the original study, runs were grouped in two tasks pertaining to the acquisition of training data and test data, respectively. Scenes from the training-clips (ClipsTrn) task were shown only once. By contrast, scenes from the test-clips (ClipsVal) task were composed of approximately one-minute-long excerpts extracted from the clips presented during training. Excerpts were concatenated to construct the sequence of every ClipsVal run; each sequence was predetermined by randomly permuting many excerpts that were repeated ten times each across all runs. The same randomized sequences, employed across ClipsVal runs, were used to collect data from all participants. + +There were twelve and nine runs dedicated to the collection of the ClipsTrn and ClipsVal tasks, respectively. Data from nine runs of each task were acquired, interleaved, in three full sessions; the three remaining runs devoted to training-data collection were acquired in half of one last session, before the `Retinotopy `__ tasks. To ensure the same topographic reference of the visual field for all participants, a colored fixation point was always presented at the center of the images. This point changed color three times per second to ensure that it was visible regardless of the color of the movie. Ten and twenty extra seconds of acquisition were respectively added at the beginning and end of every run. The total duration of each run was thus ten minutes and fifty seconds. Note that images from the test-clips task (ClipsVal) were presented three times to each participant. More precisely, in a given session, three test runs showing the same images were acquired, with the order of images varying between runs. Regardless of the session, one can find the order of images on our GitHub repository for the `first `__, `second `__ and `third `__ test-clips runs. Lastly, the `order of images for the training-clips `__ is the same in all training runs and can be found on our GitHub repository. + + +WedgeClock +---------- + +.. container:: tags + + :bdg-primary:`upper-right_vision` :bdg-primary:`upper-left_vision` :bdg-primary:`lower-right_vision` :bdg-primary:`visual_color_discrimination` :bdg-primary:`lower-left_vision` + +.. 
admonition:: Implementation + :class: seealso + + - Software: Psychopy (Python 2.7) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Retinotopy protocols on IBC include classic retinotopic paradigms, namely the Wedge and the Ring tasks. Within the Wedge protocol, the **Wedge Clock** task consists of visual stimuli of a slowly rotating clockwise checkerboard. The phase of the periodic response at the rotation frequency, measured at each voxel, corresponds to the assessment of perimetric parameters related to the polar angle (`Sereno et al., 1995 `__). Under IBC, two runs were dedicated to this task (one run for each phase-encoding direction). Each run was five-and-a-half minutes long. They were programmed for the same session following the last three *training-data* runs of the `Clips`_ task. Similarly to the Clips task, a point was displayed at the center of the visual stimulus in order to keep constant the perimetric origin in all participants. Participants were thus to fixate continuously this point whose color flickered between red, green, blue and yellow throughout the entire run. To keep the participants engaged in the task, they were instructed that after each run, they would be asked which color had most often been presented. Additionally, ten seconds of a non-flickering, red fixation cross were displayed at the end of every run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for WedgeClock + :name: condWedgeClock + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - left_meridian + - Visual representation in the left half-plane of the visual field delimited by its vertical meridian + * - lower_left + - Visual representation in the lower-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - lower_meridian + - Visual representation in the lower half-plane of the visual field delimited by its horizontal meridian + * - lower_right + - Visual representation in the lower-right quadrant of the visual field delimited by its vertical and horizontal meridians + * - right_meridian + - Visual representation in the right half-plane of the visual field delimited by its vertical meridian + * - upper_left + - Visual representation in the upper-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - upper_meridian + - Visual representation in the upper half-plane of the visual field delimited by its horizontal meridian + * - upper_right + - Visual representation in the upper-right quadrant of the visual field delimited by its vertical and horizontal meridians + +.. dropdown:: Contrasts for WedgeClock + :name: contWedgeClock + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - left_meridian + - visual representation in the left half-plane of the visual field delimited by its vertical meridian + * - lower_left + - visual representation in the lower-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - lower_meridian + - visual representation in the lower half-plane of the visual field delimited by its horizontal meridian + * - lower_right + - visual representation in the lower-right quadrant of the visual field delimited by its vertical and horizontal meridians + * - right_meridian + - visual representation in the right half-plane of the visual field delimited by its vertical meridian + * - upper_left + - visual representation in the upper-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - upper_meridian + - visual representation in the upper half-plane of the visual field delimited by its horizontal meridian + * - upper_right + - visual representation in the upper-right quadrant of the visual field delimited by its vertical and horizontal meridians + +WedgeAnti +--------- + +.. container:: tags + + :bdg-primary:`upper-right_vision` :bdg-primary:`upper-left_vision` :bdg-primary:`lower-right_vision` :bdg-primary:`visual_color_discrimination` :bdg-primary:`lower-left_vision` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy (Python 2.7) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Retinotopy protocols on IBC include classic retinotopic paradigms, namely the Wedge and the Ring tasks. Within the Wedge protocol, the **Wedge Anticlock** task consists of visual stimuli of a slowly rotating counterclockwise checkerboard. The phase of the periodic response at the rotation frequency, measured at each voxel, corresponds to the assessment of perimetric parameters related to the polar angle (`Sereno et al., 1995 `__). Under IBC, two runs were dedicated to this task (one run for each phase-encoding direction). Each run was five-and-a-half minutes long. A point was displayed at the center of the visual stimulus in order to keep constant the perimetric origin in all participants. Participants were thus to fixate continuously this point whose color flickered between red, green, blue and yellow throughout the entire run. To keep the participants engaged in the task, they were instructed that after each run, they would be asked which color had most often been presented. Additionally, ten seconds of a non-flickering, red fixation cross were displayed at the end of every run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for WedgeAnti + :name: condWedgeAnti + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - left_meridian + - Visual representation in the left half-plane of the visual field delimited by its vertical meridian + * - lower_left + - Visual representation in the lower-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - lower_meridian + - Visual representation in the lower half-plane of the visual field delimited by its horizontal meridian + * - lower_right + - Visual representation in the lower-right quadrant of the visual field delimited by its vertical and horizontal meridians + * - right_meridian + - Visual representation in the right half-plane of the visual field delimited by its vertical meridian + * - upper_left + - Visual representation in the upper-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - upper_meridian + - Visual representation in the upper half-plane of the visual field delimited by its horizontal meridian + * - upper_right + - Visual representation in the upper-right quadrant of the visual field delimited by its vertical and horizontal meridians + +.. dropdown:: Contrasts for WedgeAnti + :name: contWedgeAnti + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - left_meridian + - visual representation in the left half-plane of the visual field delimited by its vertical meridian + * - lower_left + - visual representation in the lower-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - lower_meridian + - visual representation in the lower half-plane of the visual field delimited by its horizontal meridian + * - lower_right + - visual representation in the lower-right quadrant of the visual field delimited by its vertical and horizontal meridians + * - right_meridian + - visual representation in the right half-plane of the visual field delimited by its vertical meridian + * - upper_left + - visual representation in the upper-left quadrant of the visual field delimited by its vertical and horizontal meridians + * - upper_meridian + - visual representation in the upper half-plane of the visual field delimited by its horizontal meridian + * - upper_right + - visual representation in the upper-right quadrant of the visual field delimited by its vertical and horizontal meridians + +ContRing +-------- + +.. container:: tags + + :bdg-primary:`far-peripheral_vision` :bdg-primary:`mid-peripheral_vision` :bdg-primary:`foveal_vision` :bdg-primary:`visual_color_discrimination` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy (Python 2.7) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Retinotopy protocols on IBC include classic retinotopic paradigms, namely the Wedge and the Ring tasks. The **Contracting Ring** task consists of visual stimuli depicting a thick, contracting ring. The phase of the periodic response at the contraction frequency, measured at each voxel, corresponds to the assessment of the perimetric parameters related to eccentricity (`Sereno et al., 1995 `__). Under IBC, one run was dedicated to this task (*ap* phase-encoding direction), which was five-and-a-half minutes long. A point was displayed at the center of the visual stimulus in order to keep constant the perimetric origin in all participants. 
Participants were thus to fixate continuously this point whose color flickered between red, green, blue and yellow throughout the entire run. To keep the participants engaged in the task, they were instructed that after the run, they would be asked which color had most often been presented. Additionally, ten seconds of a non-flickering, red fixation cross were displayed at the end of the run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ContRing + :name: condContRing + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - foveal + - Visual representation in the fovea + * - middle + - Visual representation in the mid-periphery of the visual field + * - peripheral + - Visual representation in the far-periphery of the visual field + +.. dropdown:: Contrasts for ContRing + :name: contContRing + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - foveal + - visual representation in the fovea + * - middle + - visual representation in the mid-periphery of the visual field + * - peripheral + - visual representation in the far-periphery of the visual field + +ExpRing +------- + +.. container:: tags + + :bdg-primary:`far-peripheral_vision` :bdg-primary:`mid-peripheral_vision` :bdg-primary:`foveal_vision` :bdg-primary:`visual_color_discrimination` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy (Python 2.7) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The Retinotopy protocols on IBC include classic retinotopic paradigms, namely the Wedge and the Ring tasks. The **Expanding Ring** task consists of visual stimuli depicting a thick, dilating ring. The phase of the periodic response at the dilation frequency, measured at each voxel, corresponds to the assessment of the perimetric parameters related to eccentricity (`Sereno et al., 1995 `__). Under IBC, one run was dedicated to this task (*pa* phase-encoding direction), which was five-and-a-half minutes long. A point was displayed at the center of the visual stimulus in order to keep constant the perimetric origin in all participants. Participants were thus to fixate continuously this point whose color flickered between red, green, blue and yellow throughout the entire run. To keep the participants engaged in the task, they were instructed that after the run, they would be asked which color had most often been presented. Additionally, ten seconds of a non-flickering, red fixation cross were displayed at the end of the run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ExpRing + :name: condExpRing + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - foveal + - Visual representation in the fovea + * - middle + - Visual representation in the mid-periphery of the visual field + * - peripheral + - Visual representation in the far-periphery of the visual field + +.. dropdown:: Contrasts for ExpRing + :name: contExpRing + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - foveal + - visual representation in the fovea + * - middle + - visual representation in the mid-periphery of the visual field + * - peripheral + - visual representation in the far-periphery of the visual field + +Raiders +------- + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 2.7) + - Audio device: MRConfon MKII + +The Raiders task was adapted from (`Haxby et al., 2011 `__), in which the full-length action movie Raiders of the Lost Ark was presented to the participants. The main goal of the original study was the estimation of the hyperalignment parameters that transform voxel space of functional data into feature space of brain responses, linked to the visual characteristics of the movie displayed. Similarly, herein, the movie was shown to the IBC participants in contiguous runs determined according to the chapters of the movie defined in the DVD. This task was completed in two sessions. In order to use the acquired fMRI data in train-test split and cross-validation experiments, we performed three extra runs at the end of the second session, in which the first three chapters of the movie were repeated. To account for stabilization of the BOLD signal, ten seconds of acquisition were added at the end of the run. **Note:** There was some lag between the onset of each run and the initiation of the stimuli (movie), which might vary between runs and subjects. This lag should probably be considered when analyzing the data. Find more details in the section :ref:`Lags in Raiders movie`. + + +Lec2 +---- + +.. container:: tags + + :bdg-info:`working_memory` :bdg-secondary:`language_processing` :bdg-secondary:`reading` :bdg-light:`inhibition` :bdg-secondary:`language_comprehension` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. Originally described in (`Perrone-Bertolotti et al., 2012 `__), this task focuses on silent reading. During the task, participants were presented with two intermixed stories, shown word by word at a rapid rate. One of the stories was written in black (on a gray screen) and the other in white. Consecutive words with the same color formed a meaningful and simple short story in French. Participants were instructed to read the black story, in order to report it at the end of the block, while ignoring the white one. Each block comprised 400 words, with 200 black words (attend condition) and 200 white words (ignore condition) for the two stories. The time sequence of colors within the 400-word series was randomized, so that participants could not predict whether the subsequent word was to be attended or not; however, the randomization was constrained to forbid series of more than three consecutive words with the same color. Data were acquired in two runs, and each word was presented for 100 ms, with a jittered inter-stimulus interval centered around 700 ms. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Lec2 + :name: condLec2 + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - attend + - A black word is rapidly presented and the participant must silently read it to form a short story together with the rest of black words + * - unattend + - A white word is rapidly presented and the participant must ignore it + +.. dropdown:: Contrasts for Lec2 + :name: contLec2 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - attend + - response to attended text + * - attend-unattend + - response to attended vs. unattended text + * - unattend + - response to unattended text + +Audi +---- + +.. container:: tags + + :bdg-success:`voice_perception` :bdg-success:`listening` :bdg-success:`sound_perception` :bdg-success:`auditory_sentence_recognition` :bdg-success:`auditory_attention` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Audio device: MagnaCoil (Magnacoustics) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task was originally described in (`Perrone-Bertolotti et al., 2012 `__) together with the `Lec2`_ localizer. Participants listened to sounds of several categories with the instruction that three of them would be presented again at the end of the task, together with three novel sounds and that they should be able to detect previously played items. There were three speech and speech-like categories, including sentences told by a computerized voice in a language familiar to the participant (French) or unfamiliar (Suomi), and reversed speech, originally in French (the same sentences as the "French" category, played backwards). These categories were compared with nonspeech-like human sounds (coughing and yawning), music, environmental sounds, and animal sounds. Participants were instructed to close their eyes while listening to three sounds of each category, with a duration of 12s each, along with three 12 s intervals with no stimulation, serving as a baseline (Silence). Consecutive sounds were separated by a 3 s silent interval. The sequence was pseudorandom, to ensure that two sounds of the same category did not follow each other. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Audi + :name: condAudi + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - alphabet + - French voice saying the alphabet + * - animals + - Real-life animal sounds + * - cough + - Concatenated sounds of people coughing + * - environment + - Real-life complex environmental sounds + * - human + - Other human sounds + * - laugh + - Concatenated sounds of people laughing + * - music + - Real-life complex musical sounds + * - reverse + - French speech stimuli played in reverse + * - silence + - Silence, used as a baseline + * - speech + - French speech stimuli + * - suomi + - Suomi speech stimuli + * - tear + - Concatenated sounds of people crying + * - yawn + - Concatenated sounds of people yawning + +.. dropdown:: Contrasts for Audi + :name: contAudi + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - alphabet + - listen to letters + * - alphabet-silence + - listen to letters vs. silence + * - animals + - listen to animals + * - animals-silence + - listen to animals vs. silence + * - cough + - listen to coughing + * - cough-silence + - listen to coughing vs. silence + * - environment + - listen to environment sounds + * - environment-silence + - listen to environment sounds vs. silence + * - human + - listen to human sounds + * - human-silence + - listen to human sounds vs. silence + * - laugh + - listen to laugh + * - laugh-silence + - listen to laugh vs. silence + * - music + - listen to music + * - music-silence + - listen to music vs. silence + * - reverse + - listen to reversed speech + * - reverse-silence + - listen to reversed speech vs. silence + * - silence + - listen to silence + * - speech + - listen to speech + * - speech-silence + - listen to speech vs. silence + * - suomi + - listen to unknown language + * - suomi-silence + - listen to unknown language vs. silence + * - tear + - listen to tears + * - tear-silence + - listen to tears vs. silence + * - yawn + - listen to yawning + * - yawn-silence + - listen to yawning vs. silence + +Visu +---- + +.. container:: tags + + :bdg-primary:`visual_representation` :bdg-primary:`face_perception` :bdg-primary:`visual_perception` :bdg-light:`object_categorization` :bdg-primary:`visual_string_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task, described in (`Vidal et al., 2010 `__), is a visual odd-ball paradigm, in which participants were instructed to press a button (index finger) every time they saw a fruit. Images of the target category and other non-target categories were rapidly presented in a pre-randomized order. Stimuli were presented for a duration of 200 ms every 1000-1200 ms, in series of 5 pictures interleaved with 3-second pause periods during which participants could freely blink. Each non-target category was presented 50 times during the experiment, and data were acquired in two separate runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Visu + :name: condVisu + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - animal + - Viewing the image of an animal + * - characters + - Viewing a string of random characters + * - face + - Viewing the image of a human face + * - fruit + - Viewing the image of a fruit + * - house + - Viewing the image of a house + * - pseudoword + - Viewing a string that forms a pseudoword + * - scene + - Viewing the image of a naturalistic scene + * - scrambled + - Scrambled image, used as baseline + * - tool + - Viewing the image of a tool + +.. dropdown:: Contrasts for Visu + :name: contVisu + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - animal + - view an animal + * - animal-scrambled + - view an animal vs. a scrambled image + * - characters + - view characters + * - characters-scrambled + - view characters vs. a scrambled image + * - face + - view a face image + * - face-scrambled + - view a face image vs. a scrambled image + * - house + - view a house + * - house-scrambled + - view a house vs. a scrambled image + * - pseudoword + - view a pseudoword + * - pseudoword-scrambled + - view a pseudoword vs. a scrambled image + * - scene + - view a scene + * - scene-scrambled + - view a scene vs. a scrambled image + * - scrambled + - view a scrambled image + * - target_fruit + - view a target object + * - tool + - view a tool + * - tool-scrambled + - view a tool vs. a scrambled image + +Lec1 +---- + +.. container:: tags + + :bdg-primary:`visual_pseudoword_recognition` :bdg-secondary:`language_processing` :bdg-secondary:`reading` :bdg-primary:`visual_word_recognition` :bdg-primary:`visual_string_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - Audio device: MagnaCoil (Magnacoustics) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task, described in (`Saignavong et al., 2017 `__), was originally used to test whether brain activity can be detected in single trials with intra-cerebral EEG-fMRI recordings. During the task, participants were presented with three vertically-arranged lines, indicated by the presence of two "+" symbols at both sides, and empty space between them. For each row, a different type of verbal stimulus was presented, and the participant was instructed to make a decision depending on the type of stimulus. The top row presented words, and the decision was an animacy decision ("Is it a living entity?"). The middle row presented pseudowords, and the decision was whether the pseudoword had one or two syllables. Finally, the bottom row presented consonant strings, and participants were instructed to answer whether the string was all-uppercase or all-lowercase. The first option was selected by pressing with the index finger on the response box, whereas the second option was selected with the middle finger. The trials were presented in blocks, and each block contained a sequence of 5 stimuli for each of the three conditions. The order of these conditions inside each block was randomized across blocks, but fixed for all participants. The "+" symbols for the row corresponding to the next condition turned white to indicate which condition was next. There were two runs with 6 blocks each, each block comprising 15 trials, which were presented for 2000 ms, with an inter-stimulus interval of 500 ms. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Lec1 + :name: condLec1 + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - pseudoword + - A pseudoword is presented and the participant has to answer whether it has one or two syllables + * - random_string + - A string of random consonants is presented and the participant has to answer whether it is all-uppercase or all-lowercase + * - word + - A word is presented and the participant has to decide whether it refers to a living entity or not + +.. dropdown:: Contrasts for Lec1 + :name: contLec1 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - pseudoword + - read a pseudoword + * - pseudoword-random_string + - read a pseudoword vs. a random string + * - random_string + - read a random string + * - word + - read a word + * - word-pseudoword + - read a word vs. a pseudoword + * - word-random_string + - read a word vs. a random string + +MVEB +---- + +.. container:: tags + + :bdg-light:`string_maintenance` :bdg-primary:`visual_buffer` :bdg-info:`visual_working_memory` :bdg-light:`numerosity` :bdg-primary:`visual_attention` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task, described in (`Hamamé et al., 2012 `__), aims to assess **verbal working memory** (the name stands for the "verbal working memory" task). In this case, the participants were presented with a string of 6 characters, of which two, four or six were letters (the rest were "#" symbols). After the string disappeared, a single letter appeared on the screen. The participant then had to indicate whether this single letter was part of the previously presented string. This was indicated by the participant with a 5-button response box, with one button for "yes" (index finger) and another for "no" (middle finger). The cognitive load was manipulated with the number of letters, and one condition was included in which all the letters of the initial string were identical. Each trial commenced with the presentation of a 1500 ms fixation cross, followed by the array of characters (probe) for 1500 ms. After an intermediate period of 3000 ms, the cue character was presented for 1500 ms. 36 trials were presented in each run. Data were acquired in two separate runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MVEB + :name: condMVEB + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - 2_letters_different + - The subject must remember 2 characters from a presented string of different letters + * - 2_letters_same + - The subject must remember the presented character from a string of 2 identical letters + * - 4_letters_different + - The subject must remember 4 characters from a presented string of different letters + * - 4_letters_same + - The subject must remember the presented character from a string of 4 identical letters + * - 6_letters_different + - The subject must remember 6 characters from a presented string of different letters + * - 6_letters_same + - The subject must remember the presented character from a string of 6 identical letters + * - letter_occurrence_response + - Subject's index finger response, indicating whether the letter was part of the previously presented string + +.. dropdown:: Contrasts for MVEB + :name: contMVEB + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - 2_letters_different + - maintaining two letters + * - 2_letters_different-same + - maintaining two letters vs. one + * - 2_letters_same + - maintaining one letter + * - 4_letters_different + - maintaining four letters + * - 4_letters_different-same + - maintaining four letters vs. one + * - 4_letters_same + - maintaining one letter + * - 6_letters_different + - maintaining six letters + * - 6_letters_different-2_letters_different + - maintaining six letters vs. two + * - 6_letters_different-same + - maintaining six letters vs. one + * - 6_letters_same + - maintaining one letter + * - letter_occurrence_response + - respond by button pressing whether the letter currently displayed was presented before or not + +MVIS +---- + +.. container:: tags + + :bdg-info:`spatial_working_memory` :bdg-info:`visual_working_memory` :bdg-light:`numerosity` :bdg-primary:`visual_attention` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task, described in (`Hamamé et al., 2012 `__), whose name stands for the **visuo-spatial working memory** task, consists of a series of events in which the participant was presented with a 4x4 grid in which two, four or six dots appeared at different positions; after that, the grid became empty and, finally, a single dot appeared on it. The participant then had to indicate whether this single dot was in the same position as any of the previously presented ones. This was indicated by the participant with a 5-button response box, with one button for "yes" (index finger) and another for "no" (middle finger). The cognitive load was manipulated with the number of dots, and one condition was included in which one of the dots was highlighted, signifying that it was the only position to retain. Each trial commenced with the presentation of a 1500 ms fixation cross, followed by the array of dots (probe) for 1500 ms. The empty grid was presented for 3000 ms, and the cue dot was presented for 1500 ms. 36 trials were presented in each run. The data were acquired in two runs. 
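As with the other localizers, pooled contrasts such as ``dots-control`` in the next table are weighted sums of the condition regressors listed there. The sketch below builds such contrast vectors by hand for a hypothetical design matrix whose columns carry the MVIS condition names; the column list and helper function are illustrative and not part of the IBC analysis pipeline.

.. code-block:: python

    import numpy as np

    # Hypothetical design-matrix columns named after the MVIS conditions
    # (a real design matrix would also contain drift and confound regressors).
    columns = ["2_dots", "2_dots_control", "4_dots", "4_dots_control",
               "6_dots", "6_dots_control", "dot_displacement_response", "constant"]

    def contrast_vector(weights, columns):
        """Turn a {regressor_name: weight} mapping into a contrast vector."""
        vector = np.zeros(len(columns))
        for name, weight in weights.items():
            vector[columns.index(name)] = weight
        return vector

    # "dots-control": maintaining two to six dot positions vs. a single one.
    dots_vs_control = contrast_vector(
        {"2_dots": 1, "4_dots": 1, "6_dots": 1,
         "2_dots_control": -1, "4_dots_control": -1, "6_dots_control": -1},
        columns)

    # "6_dots-2_dots": load effect, six positions vs. two.
    load_effect = contrast_vector({"6_dots": 1, "2_dots": -1}, columns)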
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MVIS + :name: condMVIS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - 2_dots + - 2 positions to remember + * - 2_dots_control + - 1 position to remember because of the highlighted dot + * - 4_dots + - 4 positions to remember + * - 4_dots_control + - 1 position to remember because of the highlighted dot + * - 6_dots + - 6 positions to remember + * - 6_dots_control + - 1 position to remember because of the highlighted dot + * - dot_displacement_response + - Subject's index finger response, indicating whether the dot was in the same position as any of the previously presented ones + +.. dropdown:: Contrasts for MVIS + :name: contMVIS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - 2_dots-2_dots_control + - maintain position of two dots vs. one + * - 4_dots-4_dots_control + - maintain position of four dots vs. one + * - 6_dots-2_dots + - maintain position of six dots vs. two + * - 6_dots-6_dots_control + - maintain position of six dots vs. one + * - dot_displacement_response + - respond by button pressing whether the dot currently displayed share the same location as any of those shown before + * - dots-control + - maintain position of two to six dots vs. one + +Moto +---- + +.. container:: tags + + :bdg-warning:`saccadic_eye_movement` :bdg-secondary:`reading` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: In-house custom-made sticks featuring one-top button, each one to be used in each hand + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task is a basic **motor localizer** for several body parts. The participants are presented with three small gray squares over a black background image. At the beginning of each block, a text prompt will appear on screen to indicate the body part that will be moved next. Afterwards, the left and right squares will turn white to indicate movement of the corresponding part. For example, for the hands condition, the participant is required to perform a small movement of the left hand when the left square turns white, and likewise for the right hand. Ten movements were prompted for each block, five for the right body part and five for the left, consecutively for each direction and always in the same order. There were two distinct blocks for each body part. For each trial, the white square was presented during 1000 ms, with 1500 ms between trials, for a total duration of 25 s per block, with a total of 12 blocks. Data were acquired in two separated runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Moto + :name: condMoto + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - finger_left + - Movement of the left index finger, indicated by a button-press + * - finger_right + - Movement of the right index finger, indicated by a button-press + * - fixation + - Gaze fixation on the central square + * - foot_left + - Movement of the left foot + * - foot_right + - Movement of the right foot + * - hand_left + - Movement of the left hand + * - hand_right + - Movement of the right hand + * - saccade_left + - Movement of the eyes to the left + * - saccade_right + - Movement of the eyes to the right + * - tongue_left + - Movement of the tongue to the left + * - tongue_right + - Movement of the tongue to the right + +.. dropdown:: Contrasts for Moto + :name: contMoto + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - finger_left-fixation + - left finger tapping vs. any movement + * - finger_right-fixation + - right finger tapping vs. any movement + * - foot_left-fixation + - move left foot vs. any movement + * - foot_right-fixation + - move right foot vs. any movement + * - hand_left-fixation + - move left hand vs. any movement + * - hand_right-fixation + - move right hand vs. any movement + * - instructions + - read instructions + * - saccade-fixation + - saccade vs. any movement + * - tongue-fixation + - move tongue vs. any movement + +MCSE +---- + +.. container:: tags + + :bdg-primary:`visual_search` :bdg-primary:`upper-right_vision` :bdg-primary:`upper-left_vision` :bdg-primary:`lower-right_vision` :bdg-light:`salience` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +This task belongs to a battery of 8 different localizers that tap on a wide array of cognitive functions provided to us by the `Labex Cortex group `__ at the University of Lyon. This task described in (`Ossandón et al., 2012 `__) was originally used to study whether visual search processes of a salient target can be thought as a purely bottom-up process, or if it requires action from top-down attentional processes. The task consisted in the presentation of an array of 35 "L" letters, rotated at different angles, together with a target "T" letter (total 36 stimuli in each trial). Subjects were instructed to search for the target and indicate whether it was on the left or right side of the grid, by pressing respectively with the index or middle finger on a 5-button response box. There were two conditions: high-salience (the target is gray while the other stimuli is black) and low-salience (all stimuli are gray). The two conditions were presented alternatively in blocks, with 6 blocks of 10 trials each. Each trial was presented for 3 s with an inter-stimulus interval of 1 s. There was also a 20 s fixation cross between blocks. Data were acquired in two separated runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MCSE + :name: condMCSE + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - high_salience_left + - Looking for a salient letter in the left visual field + * - high_salience_right + - Looking for a salient letter in the right visual field + * - low_salience_left + - Looking for a non-salient letter in the left visual field + * - low_salience_right + - Looking for a non-salient letter in the right visual field + +.. dropdown:: Contrasts for MCSE + :name: contMCSE + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - high-low_salience + - looking for a high-salient symbol + * - high_salience_left + - looking for a salient symbol in left visual field + * - high_salience_right + - looking for a salient symbol in right visual field + * - low+high_salience + - looking for a symbol + * - low-high_salience + - looking for a low-salient symbol + * - low_salience_left + - looking for a low-salient symbol in left visual field + * - low_salience_right + - looking for a low-salient symbol in right visual field + * - salience_left-right + - looking for a symbol in left vs. right visual field + * - salience_right-left + - looking for a symbol in right vs. left visual field + +Audio +----- + +.. container:: tags + + :bdg-success:`voice_perception` :bdg-success:`listening` :bdg-success:`sound_perception` :bdg-success:`auditory_attention` :bdg-secondary:`language_processing` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 3.6) + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - Audio device: MagnaCoil (Magnacoustics) + +This task, originally described in (`Santoro et al., 2017 `__), is an auditory localizer. During each run, the participants were presented with sounds from different categories, and were instructed to press a button with the index finger whenever two consecutive sounds were identical. From a group of 288 sounds, divided into 6 different categories, 4 sets were created. Each set contained 72 sounds of each of the categories, and each one was present only in one of the sets. Furthermore, each set was pre-randomized in 3 different orders, and the same sequences were used for all participants. On top of the 72 sounds, each run also included 5 silences and 5 repeated sounds from the original 72. In total, each run consisted of 82 trials of 2 seconds each. It is important to note that the data for this task was acquired using an interrupted acquisition sequence, to minimize the effect that scanner noise can have in the auditory processing targeted by the experiment. To this end, the inter-stimulus interval was programmed in a sequence of 4, 4, and 6 seconds, meaning that the interval between stimuli would be 4s for the first trial, 4s for the second, 6s for the third, and then the sequence repeats until the end of the run. The variability of the ISI and the silence trials avoided stimulus' presentation to be predictable in time. + +**Note:** We used the MagnaCoil (Magnacoustics) audio device for all subjects except for *subject-08*, for whom we employed Optoacoustics. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Audio + :name: condAudio + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - animal + - Sound of animal noises + * - catch + - Repetition of the previous sound + * - music + - Musical sound + * - nature + - Naturalistic sound + * - silence + - No sound + * - speech + - Human speech sound + * - tool + - Sound of tool usage + * - voice + - Non-speech human sound + +.. dropdown:: Contrasts for Audio + :name: contAudio + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - animal + - listen to animals + * - animal-others + - listen to animals vs. other sounds + * - animal-silence + - listen to animals vs. silence + * - mean-silence + - listening to sounds vs. silence + * - music + - listen to music + * - music-others + - listen to music vs. other sounds + * - music-silence + - listen to music vs. silence + * - nature + - listen to nature + * - nature-others + - listen to nature vs. other sounds + * - nature-silence + - listen to nature vs. silence + * - speech + - listen to speech + * - speech-others + - listen to speech vs. other sounds + * - speech-silence + - listen to speech vs. silence + * - tool + - listen to tool + * - tool-others + - listen to tool vs. other sounds + * - tool-silence + - listen to tool vs. silence + * - voice + - listen to voice + * - voice-others + - listen to voice vs. other sounds + * - voice-silence + - listen to voice vs. silence + +Attention +--------- + +.. container:: tags + + :bdg-warning:`saccadic_eye_movement` :bdg-light:`spatial_attention` :bdg-light:`selective_attention` :bdg-light:`attentional_focusing` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +The Attention task is a version of the classical flanker task (`Eriksen and Eriksen, 1974 `__), in which the participant has to judge the direction (left/right) the target flanker (an arrow) is pointing to. The target flanker is surrounded by 4 other flankers that can be congruent or incongruent with it, thus capturing selective attention and inhibitory processes. Two different buttons (index and middle fingers, respectively) were assigned to left/right responses, and the participant had to indicate the direction of the central arrow in a horizontal group of 5 arrows. In each trial, one or two positional cues were presented above and below the center of the screen. When one cue was given, the flankers would appear centered around it, whereas when two cues were presented, the flankers would appear centered around one of them. 
The four flankers surrounding the target would always point to the same direction, and can be congruent or incongruent with the direction the target flanker is facing. The task was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__, which contains the original design. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Attention + :name: condAttention + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - congruent + - The stimulus is congruent (same direction) with the rest of the arrows shown. + * - double_cue + - The stimulus is not spatially cued, so the subject doesn't know where the arrows will be shown (both stars appear). + * - incongruent + - The stimulus is not congruent (opposite direction) with the rest of the arrows shown. + * - spatial + - The stimulus is spatially cued, so the subject knows where the arrows will be shown (only one star appears). + +.. dropdown:: Contrasts for Attention + :name: contAttention + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - double_congruent + - no spatial cue + no distractors in the probe + * - double_cue + - cues appear in both possible location of the probe at the same time + * - double_incongruent + - no spatial cue + distractors in the probe + * - double_incongruent-double_congruent + - ignore distractors vs. no distractors without spatial cue + * - incongruent-congruent + - ignore distractors vs. no distractors + * - spatial_congruent + - cued probe no distractors + * - spatial_cue + - cued probe + * - spatial_cue-double_cue + - cued vs. uncued probe + * - spatial_incongruent + - cued probe with distractors in the probe + * - spatial_incongruent-spatial_congruent + - ignore distractors vs. no distractors with spatial cue + +StopSignal +---------- + +.. container:: tags + + :bdg-light:`proactive_control` :bdg-primary:`shape_recognition` :bdg-primary:`shape_perception` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is a part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation to all written stimuli and instructions into french, as well as fixing a total time limit for experimentsthat allowed the participants their own pace for responding. All these modifications were done with extreme care of not altering the psychological state that the original tasks were designed to capture during scanning. StopSignal task was originally used to localize activation relative to inhibition of a prominent motor response (`Bissett and Logan, 2011 `__). + +Four different polygonal shapes composed the set from which one of them was presented in each trial. Two of them were assigned to the button corresponding to the index finger, and two of them to the button corresponding to the middle finger. 
The participants were instructed to press the correct button as fast as possible, except if a red-colored star appeared on top of the target stimulus. There were 12 practice trials followed by 123 test trials divided into 3 blocks of 41 trials each, with a resting period of 9 seconds in between blocks. During practice, feedback was provided to indicate correct and incorrect responses, as well as to indicate if the responses were too slow. No stop trials (red star) were present during practice, although the instructions pertaining to the red star were presented before practice. This was done to build up a predominant motor response, in order to better capture inhibitory processes. In stop trials, there was a jittered delay between the target stimulus and the stop signal, ranging from 400 to 1000 ms. The duration of the stop signal was fixed at 500 ms, the duration of the target stimulus was 850 ms, and there was a fixation cross between trials with a duration centered around 2250 ms. The task was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__, which contains the original design. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for StopSignal + :name: condStopSignal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - go + - Respond to the stimulus + * - stop + - Hold motor response + +.. dropdown:: Contrasts for StopSignal + :name: contStopSignal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - go + - shape recognition + * - stop + - shape recognition, stopped response + * - stop-go + - response inhibition + +TwoByTwo +-------- + +.. container:: tags + + :bdg-primary:`visual_perception` :bdg-light:`cue_switch` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `Experiment Factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +The TwoByTwo protocol was designed to study the responses to task-switching and cue-switching in every trial, with the aim of assessing the activity elicited by switching either or both the task and the cue, and how switching one affects the response to the other. It consisted of presenting colored single-digit numbers from 1 to 9, preceded by a cue string indicating which task must be performed. For each trial, the task could either be to judge whether the number is greater or less than 5, or to judge whether the digit shown is colored in blue or orange. 
For each of the two tasks, two different strings could be used as cue: for the first, the cue could display either 'Magnitude' or 'High/Low', both strings indicating the participant must judge the quantity; for the second task, the subject could read either 'Color' or 'Orange/Blue' as cues, both strings indicating the task is to judge the color. Two different buttons (index/middle finger) were assigned to the orange/high and blue/low options, respectively. The task is composed by 16 practice trials, followed by 240 trials divided in 3 blocks of 80 trials each. The order of cue and task switching was randomized. The task was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__, it contains a slightly different version of the task in which they switch between three different tasks instead of two. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for TwoByTwo + :name: condTwoByTwo + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - cue_taskstay_cuestay + - Appearance of the cue on screen when both the task and the cue are the same with respect to the previous trial + * - cue_taskstay_cueswitch + - Appearance of the cue on screen when only the cue switches with respect of the previous trial, for example the color task is repeated but the cue changes from 'Color’ to 'Orange/Blue’ + * - cue_taskswitch_cuestay + - Appearance of the cue on screen when the task switches but the cue stays the same it was the previous trial for that task. For example, the task switches from color to number and the presented cue is the same as the previous number trial + * - cue_taskswitch_cueswitch + - Appearance of the cue on screen when both the task and the cue switch, for example the task goes from color to number and the cue changes from 'Magnitude’ to 'High/Low' compared to the previous number trial + * - stim_taskstay_cuestay + - Appearance of the stimulus on screen when both the task and the cue are the same with respect to the previous trial + * - stim_taskstay_cueswitch + - Appearance of the stimulus on screen when only the cue switches with respect of the previous trial, for example the color task is repeated but the cue changes from 'Color’ to 'Orange/Blue' + * - stim_taskswitch_cuestay + - Appearance of the stimulus on screen when the task switches but the cue stays the same it was the previous trial for that task. For example, the task switches from color to number and the presented cue is the same as the previous number trial + * - stim_taskswitch_cueswitch + - Appearance of the stimulus on screen when both the task and the cue switch, for example the task goes from color to number and the cue changes from 'Magnitude’ to 'High/Low' compared to the previous number trial + +.. dropdown:: Contrasts for TwoByTwo + :name: contTwoByTwo + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - cue_switch-stay + - effect of cue switch + * - cue_taskstay_cuestay + - both task and cue repeat + * - cue_taskstay_cueswitch + - task repeats, cue switches + * - cue_taskswitch_cuestay + - both task and cue switch + * - cue_taskswitch_cueswitch + - both task and cue switch + * - stim_taskstay_cuestay + - both task and cue repeat + * - stim_taskstay_cueswitch + - task repeats, cue switches + * - stim_taskswitch_cuestay + - both task and cue switch + * - stim_taskswitch_cueswitch + - both task and cue switch + * - task_switch-stay + - effect of task switch + +Discount +-------- + +.. container:: tags + + :bdg-dark:`incentive_salience` :bdg-light:`selective_control` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +Discount is a decision-making task in which the participant has to decide whether to take a figurative amount of 20 dollars today or a larger amount in a set number of days. The task is composed of 1 practice trial, followed by 120 test trials divided into 2 blocks of 60 trials each. The amount of money and the number of days are different for each trial. Each trial lasts for 4 seconds. The task was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__, which contains a slightly different version of the task in which participants choose between two different amounts at two different delays, instead of the fixed 20-dollars-today set-up. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Discount + :name: condDiscount + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - amount + - Effect of reward gain + * - delay + - Effect of reward delay + +.. dropdown:: Contrasts for Discount + :name: contDiscount + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - amount + - effect of reward gain + * - delay + - effect of delay on reward + +SelectiveStopSignal +------------------- + +.. container:: tags + + :bdg-light:`proactive_control` :bdg-primary:`shape_recognition` :bdg-primary:`shape_perception` + +.. 
admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is a part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation to all written stimuli and instructions into french, as well as fixing a total time limit for experimentsthat allowed the participants their own pace for responding. All these modifications were done with extreme care of not altering the psychological state that the original tasks were designed to capture during scanning. + +Similar to the `StopSignal`_ task, SelectiveStopSignal task required participants to refrain from responding if a red star appears after the target stimulus is presented. In this task, however, the red star only indicates the need to inhibit the motor response in one of the two sides (critical side), while it should be ignored for the other (noncritical side). Motor response is to be given by pressing with the index finger on the corresponding button of the response box. The task is composed by 12 practice trials, followed by 250 test trials divided in 5 blocks of 50 trials each. The task was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__ which contains the original design. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for SelectiveStopSignal + :name: condSelectiveStopSignal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - go_critical + - Answer to the visual stimulus (critical side) + * - go_noncritical + - Answer to the visual stimulus (noncritical side) + * - ignore + - Answer regardless of the stop signal + * - stop + - Hold motor response + +.. dropdown:: Contrasts for SelectiveStopSignal + :name: contSelectiveStopSignal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - go_critical + - respond with the correct finger depending on the image displayed (side instructed to stop if the stop signal appears) + * - go_critical-stop + - inhibit the motor response + * - go_noncritical + - respond with the correct finger depending on the image displayed (side instructed to ignore the stop signal) + * - go_noncritical-ignore + - ignore stop signal vs. simply respond + * - ignore + - respond anyway even if the stop signal appears + * - ignore-stop + - ignore stop signal vs. inhibit motor response + * - stop + - stop the response if the stop signal appears + * - stop-ignore + - inhibit motor response vs. ignore stop signal + +Stroop +------ + +.. container:: tags + + :bdg-light:`proactive_control` :bdg-primary:`visual_perception` :bdg-light:`conflict_detection` + +.. 
admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +In this adaptation of the classic Stroop task (`Stroop, 1935 `__), the participants must press one of three buttons depending on the color of the presented word. In contrast to the classic pen-and-paper version of the task, the congruent and incongruent trials are intermixed. The three words/colors presented were red, green and blue, whose button presses corresponded on the response box to the index, middle and ring fingers, respectively. + +For the original version of this task, the authors provide a `simulator `__, which contains the original design. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Stroop + :name: condStroop + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - congruent + - Color and word are the same + * - incongruent + - Color and word are different + +.. dropdown:: Contrasts for Stroop + :name: contStroop + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - congruent + - word and word color are the same + * - incongruent + - word and color are not the same + * - incongruent-congruent + - conflict between automatic and instructed response + +ColumbiaCards +------------- + +.. container:: tags + + :bdg-dark:`risk_processing` :bdg-dark:`risk_aversion` :bdg-dark:`reward_processing` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +The ColumbiaCards task is a gambling task in which the participants are presented with a set of cards facing down. In each trial, a different number of cards appears and the participant is informed of the amount gained per good card uncovered, the amount lost when uncovering a bad card, and the number of bad cards in the set. The participant can uncover as many cards as they want, by pressing the index finger's button on the response box, before pressing the middle finger's button to end the trial and start the next one. Uncovering a bad card automatically ends the trial. The total number of cards, the number of bad cards, the amount gained per card uncovered and the amount lost if a bad card was uncovered changed from trial to trial. The order of the cards is pre-determined for each trial, but the participant does not know it. The task is composed of 88 trials divided into 4 blocks of 22 trials each and was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. + +For the original version of this task, the authors provide a `simulator `__, which contains the original design. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for ColumbiaCards + :name: condColumbiaCards + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - gain + - Expected gain in gambling + * - loss + - Expected loss in gambling + * - num_loss_cards + - Probability of losing in gambling + +.. dropdown:: Contrasts for ColumbiaCards + :name: contColumbiaCards + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - gain + - expected gain + * - loss + - expected loss + * - num_loss_cards + - probability of losing + +DotPatterns +----------- + +.. container:: tags + + :bdg-light:`proactive_control` :bdg-primary:`shape_recognition` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using the `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation of all written stimuli and instructions into French, as well as fixing a total time limit for experiments that otherwise allowed the participants to respond at their own pace. All these modifications were made with great care not to alter the psychological state that the original tasks were designed to capture during scanning. + +The DotPatterns task presents the participant with pairs of stimuli (a cue followed by a probe), separated by a fixation cross. The participant has to press a button (index finger) as fast as possible after the presentation of the probe, and only one specific cue-probe combination is instructed to be responded to differently. This task was designed to capture activation related to the expectancy of the probe elicited by the correct cue. The task is composed of 160 trials divided into 4 blocks of 40 trials each. Each cue and probe lasted for 500 ms, separated by a fixation cross lasting 2000 ms. It was acquired in two runs, within the same session as other tasks from the battery and using different phase-encoding directions. 
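The cue-probe structure of DotPatterns can be made concrete with a minimal sketch. This is not the expfactory implementation: the pairing names simply mirror the conditions listed below, the timing constants come from the paragraph above, and the equal proportion of pairings per block is an assumption made purely for illustration.

.. code-block:: python

   # Minimal sketch, not the actual expfactory implementation.
   # Assumption: the four cue-probe pairings occur equally often within a
   # block; the real trial proportions are not documented here.
   import random

   CUE_MS, FIX_MS, PROBE_MS = 500, 2000, 500
   TRIAL_MS = CUE_MS + FIX_MS + PROBE_MS  # 3000 ms of stimulation per trial

   PAIRINGS = [
       ("correct_cue", "correct_probe"),    # target pair, responded to differently
       ("correct_cue", "incorrect_probe"),
       ("incorrect_cue", "correct_probe"),
       ("incorrect_cue", "incorrect_probe"),
   ]

   def make_block(n_trials=40, seed=0):
       """Return a shuffled list of cue-probe pairings for one block."""
       rng = random.Random(seed)
       trials = [PAIRINGS[i % len(PAIRINGS)] for i in range(n_trials)]
       rng.shuffle(trials)
       return trials

   if __name__ == "__main__":
       block = make_block()
       print(len(block), "trials per block,",
             len(block) * TRIAL_MS / 1000, "s of stimulation")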
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for DotPatterns + :name: condDotPatterns + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - correct_cue_correct_probe + - Target pair, captures expectancy after correct cue + * - correct_cue_incorrect_probe + - Nontarget pair that also captures expectancy after correct cue + * - cue + - Look at the stimulus to provide a description of the cue + * - incorrect_cue_correct_probe + - Incorrect pair. The probe is correct but the cue is not + * - incorrect_cue_incorrect_probe + - Incorrect pair, both are incorrect + +.. dropdown:: Contrasts for DotPatterns + :name: contDotPatterns + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - correct_cue-incorrect_cue + - effect of cognitive control + * - correct_cue_correct_probe + - both cue and probe are correct (AX) + * - correct_cue_incorrect_probe + - the cue is correct but the probe is not (AY) + * - correct_cue_incorrect_probe-correct_cue_correct_probe + - incorrect vs. correct probe with correct cue + * - correct_cue_incorrect_probe-incorrect_cue_correct_probe + - effect of cognitive control + * - cue + - attend to cue + * - incorrect_cue_correct_probe + - cue is incorrect but probe is correct (BX) + * - incorrect_cue_incorrect_probe + - both cue and probe are incorrect (BY) + * - incorrect_cue_incorrect_probe-correct_cue_incorrect_probe + - effect of cognitive control + * - incorrect_cue_incorrect_probe-incorrect_cue_correct_probe + - shape recognition + * - incorrect_probe-correct_probe + - shape recognition + +WardAndAllport +-------------- + +.. container:: tags + + :bdg-light:`goal_hierarchy` :bdg-primary:`visual_perception` :bdg-light:`search_depth` :bdg-light:`planning` :bdg-info:`working_memory` + +.. admonition:: Implementation + :class: seealso + + - Software: JavaScript, Python 2.7 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +This task is a part of a battery of several tasks coming from the `experiment factory `__ published in (`Eisenberg et al., 2017 `__) and presented using `expfactory-python `__ package. The battery was used to capture several aspects of self-regulation, including behavioral inhibition, decision making and planning abilities, among others. The adjustments concerned the translation to all written stimuli and instructions into french, as well as fixing a total time limit for experimentsthat allowed the participants their own pace for responding. All these modifications were done with extreme care of not altering the psychological state that the original tasks were designed to capture during scanning. + +WardAndAllport task is a digital version of the WATT3 task (`Ward, Allport, 1997 `__, `Shallice, 1982 `__), and its main purpose is to capture activation related to planning abilities. For this, the task uses a factorial manipulation of 2 task parameters: search depth and goal hierarchy. Search depth involves mentally constructing the steps necessary to reach the goal state, and the interdependecy between steps in order to do so. This is expressed by the presence or absence of intermediate movements necessary for an optimal solution of each problem. 
Goal hierarchy refers to whether the order in which the three balls have to be put in their goal positions can be completely extracted from looking at the goal state or if it requires the participant to integrate information between goal and starting states (which result in unambiguous or partially ambiguous goal states, respectively). Detailed explanations and examples of each one of the four categories can be found in `Kaller et al., 2011 `__. + +The task was divided in 4 practice trials, followed by 48 test trials divided in 3 blocks of 14 trials each, separated by 10 seconds of resting period. Data was only acquired during the test trials, although the practice trials were also performed inside the scanner with its corresponding equipment. In each trial, the participant would see two configurations of the towers: the test towers on the left, and the target towers on the right. The towers of the right showed the final configuration of balls required to complete the trial. Three buttons were assigned to the left (index finger' button), middle (middle finger's button) and right (ring finger's button) columns respectively, and each button press would either take the upper ball of the selected column or drop the ball in hand at the top of the selected column. On the upper left corner, a gray square with the text "Ball in hand" would show the ball currently in hand. All trials could be solved in 3 movements, considering taking a ball and putting it elsewhere as a single movement. The time between the end of one trial and the beginning of the next one was 1000 ms. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for WardAndAllport + :name: condWardAndAllport + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - planning_ambiguous_direct + - Partially ambiguous goal state without intermediate movement + * - planning_ambiguous_intermediate + - Partially ambiguous goal state with intermediate movement + * - planning_unambiguous_direct + - Unambiguous goal state without intermediate movement + * - planning_unambiguous_intermediate + - Unambiguous goal state with intermediate movement + +.. dropdown:: Contrasts for WardAndAllport + :name: contWardAndAllport + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - ambiguous-unambiguous + - effect of goal hierarchy + * - intermediate-direct + - effect of search depth + * - move_ambiguous_direct + - complex goal hierarchy + simple search depth + * - move_ambiguous_intermediate + - complex goal hierarchy + complex search depth + * - move_unambiguous_direct + - simple goal hierarchy + simple search depth + * - move_unambiguous_intermediate + - simple goal hierarchy + complex search depth + * - planning_ambiguous_direct + - complex goal hierarchy + simple search depth + * - planning_ambiguous_intermediate + - complex goal hierarchy + complex search depth + * - planning_unambiguous_direct + - simple goal hierarchy + simple search depth + * - planning_unambiguous_intermediate + - simple goal hierarchy + complex search depth + +LePetitPrince +------------- + +.. container:: tags + + + +.. 
admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 3.6) + - Audio device: OptoACTIVE (Optoacoustics) + +This experiment is a natural language comprehension protocol, originally implemented by (`Bhattasali et al., 2019 `__, `Hale et al., 2022 `__). Each run of this task comprised three chapters of The Little Prince story (Le Petit Prince) by Antoine de Saint-Exupery, in French. During each run, the participant was presented with the audio of the story. In between runs, the experimenters would ask some multiple-choice questions, as well as two or three open-ended questions about the contents of the previous run, to keep participants engaged. The length of the runs varied between nine and thirteen minutes. Data were acquired in two different sessions, comprising five and four runs, respectively. The protocol also included a six-minute localizer at the end of the second acquisition, in order to accurately map language areas for each participant; see :ref:`LPPLocalizer` for details. + +**Note:** We used the OptoACTIVE (Optoacoustics) audio device for all subjects except for *subject-08*, for whom we employed MRConfon MKII. + + +LPPLocalizer +------------ + +.. container:: tags + + :bdg-success:`voice_perception` :bdg-success:`listening` :bdg-success:`sound_perception` :bdg-success:`auditory_sentence_recognition` :bdg-success:`auditory_attention` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 3.6) + - Audio device: OptoACTIVE (Optoacoustics) + +**Le Petit Prince Localizer** was included as part of the :ref:`LePetitPrince` task and was performed at the end of the second acquisition. It aimed to accurately map the language areas of each participant, which would later be used for further analysis. The stimuli consisted of two types of audio clips: phrases and their reversed versions. The phrases were 2-second voice recordings (audio only) of context-free sentences in French. The reversed stimuli used the same clips but played backward, making the content unintelligible. The run consisted of alternating blocks of 3 trials with phrases (French trials) and 3 trials with reversed phrases (control trials). This localizer was conducted in a single run, lasting 6 minutes and 32 seconds. + +**Note:** We used the OptoACTIVE (Optoacoustics) audio device for all subjects except for *subject-08*, for whom we employed MRConfon MKII. + + +.. dropdown:: Contrasts for LPPLocalizer + :name: contLPPLocalizer + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - control + - reversed, unintelligible speech + * - french + - audio clips of phrases in French + * - french-control + - audio clips of phrases in French vs. reversed, unintelligible speech + +BiologicalMotion1 +----------------- + +.. container:: tags + + :bdg-warning:`local_motion_coherence` :bdg-light:`vertical_flip` :bdg-warning:`global_motion_coherence` :bdg-warning:`biological_motion` :bdg-warning:`motion_detection` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`video;1em;` `See demo `__ + +The phenomenon known as *biological motion* was first introduced in (`Johansson, 1973 `__), and consisted of point-light displays arranged and moving in a way that resembled a person moving. 
The task that we used was originally developed by (`Chang et al., 2018 `__). During the task, the participants were shown a point-light "walker", and they had to decide if the walker's orientation was to the left or right, by pressing on the response box respectively on the index finger's button or the middle finger's button. The stimuli were divided in 6 different categories: three types of walkers, as well as their reversed versions. The division of the categories focuses on three types of information that the participant can get from the walker: global information, local information and orientation. Global information refers to the general structure of the body and the spatial relationships between its parts. Local information refers to kinematics, speed of the points and mirror-symmetric motion. Please see `Chang et al., 2018 `__ for more details about the stimuli. The data was acquired in 4 runs. Each run comprises 12 blocks with 8 trials per block. The stimulus duration was 500ms and the inter-stimulus interval 1500ms (total 16s per block). Each of the blocks was followed by a fixation block, that also lasted 16s. Each run contained 4 of the six conditions, repeated 3 times each. There were 2 different types of runs: type 1 and 2. This section refers to run type 1, which contained both global types (natural and inverted) and both local naturals. For run type 2 refer to :ref:`BiologicalMotion2`. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for BiologicalMotion1 + :name: condBiologicalMotion1 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - global_inverted + - Global walker, inverted upside-down + * - global_upright + - Structural information is preserved, but individual local trajectories are mirror-symmetric. Global-only in the original paper + * - natural_inverted + - Local natural walker, inverted along the horizontal axis + * - natural_upright + - Local information is preserved, but the points are randomly shuffled along the X-axis, rendering global cues uninformative. "Local-natural" in the original experiment + +.. dropdown:: Contrasts for BiologicalMotion1 + :name: contBiologicalMotion1 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - global-natural + - effect of global information on motion perception + * - global_inverted + - global reversed biological motion vs. fixation + * - global_upright + - global biological motion vs. fixation + * - global_upright-global_inverted + - effect of orientation on motion perception + * - global_upright-natural_upright + - effect of global information on motion perception + * - inverted-upright + - effect of orientation on motion perception + * - natural-global + - Negative effect of global information on motion perception + * - natural_inverted + - local reversed biological motion vs. fixation + * - natural_upright + - local biological motion vs. fixation + * - natural_upright-natural_inverted + - effect of orientation on motion perception + +BiologicalMotion2 +----------------- + +.. container:: tags + + :bdg-warning:`local_motion_coherence` :bdg-light:`vertical_flip` :bdg-warning:`scrambled_motion` :bdg-warning:`biological_motion` :bdg-warning:`motion_detection` + +.. 
admonition:: Implementation + :class: seealso + + - Software: Psychophysics Toolbox Version 3 (PTB-3), aka Psychtoolbox-3, for GNU Octave + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + +The phenomenon known as *biological motion* was first introduced in (`Johansson, 1973 `__), and consisted in point-light displays arranged and moving in a way that resembled a person moving. The task that we used was originally developed by (`Chang et al., 2018 `__). During the task, the participants were shown a point-light "walker", and they had to decide if the walker's orientation was to the left or right, by pressing on the response box respectively on the index finger's button or the middle finger's button. The stimuli was divided in 6 different categories: three types of walkers, as well as their reversed versions. The division of the categories focuses on three types of information that the participant can get from the walker: global information, local information and orientation. Global information refers to the general structure of the body and the spatial relationships between its parts. Local information refers to kinematics, speed of the points and mirror-symmetric motion. Please see `Chang et al., 2018 `__ for more details about the stimuli. The data was acquired in 4 runs. Each run comprises 12 blocks with 8 trials per block. The stimulus duration was 500ms and the inter-stimulus interval 1500ms (total 16s per block). Each of the blocks was followed by a fixation block, that also lasted 16s. Each run contained 4 of the six conditions, repeated 3 times each. This section refers to run type 2, which contained both local naturals and both local modified. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for BiologicalMotion2 + :name: condBiologicalMotion2 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - modified_inverted + - Local modified walker, inverted along the horizontal axis + * - modified_upright + - Neither structural or local information is carried out by this type of walker, it uses both types of modifications used for the previous two categories. "Local modified" in the original study + * - natural_inverted + - Local natural walker, inverted along the horizontal axis + * - natural_upright + - Local information is preserved, but the points are randomly shuffled along the X-axis, rendering global cues uninformative. "Local-natural" in the original experiment + +.. dropdown:: Contrasts for BiologicalMotion2 + :name: contBiologicalMotion2 + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - inverted-upright + - effect of orientation on motion perception + * - modified-natural + - Negative effect of local information on motion perception + * - modified_inverted + - no motion information reversed vs. fixation + * - modified_upright + - scrambled motion information vs. fixation + * - modified_upright-modified_inverted + - effect of orientation on motion perception + * - natural-modified + - effect of local information on motion perception + * - natural_inverted + - local reversed biological motion vs. fixation + * - natural_upright + - local biological motion vs. 
fixation + * - natural_upright-modified_upright + - effect of local information on motion perception + * - natural_upright-natural_inverted + - effect of orientation on motion perception + +MathLanguage +------------ + +.. container:: tags + + :bdg-secondary:`sentence_comprehension` :bdg-primary:`visual_sentence_comprehension` :bdg-success:`auditory_sentence_recognition` :bdg-secondary:`narrative_comprehension` :bdg-success:`auditory_word_recognition` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 3.6) + - Response device: In-house custom-made sticks featuring one-top button, each one to be used in each hand + + - Audio device: OptoACTIVE (Optoacoustics) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +The **Mathematics and Language** protocol was taken from (`Amalric et al., 2016 `__). This task aims to comprehensively capture the activation related with several types of mathematical and other types of facts, presented as sentences. During the task, the participants are presented a series of sentences, each one in either of two modalities: auditory or visual. Some of the categories include theory of mind statements, arithmetic facts and geometry facts. After each sentence, the participant has to indicate whether they believe the presented fact to be true or false, by respectively pressing the button in the left or right hand. A second version of each run (runs *B*) was generated reverting the modality for each trial, so those being visual in the original runs (runs *A*), would be auditory in their corresponding *B* version, and vice-versa. Each participant performed four A-type runs, followed three B-type runs due to time constraints. Each run had an equal number of trials of each category, and the order of the trials was the same for all subjects. + +**Note:** We used the OptoACTIVE (Optoacoustics) audio device for all subjects except for *subject-05* and *subject-08*, who completed the session using MRConfon MKII. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MathLanguage + :name: condMathLanguage + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - arithmetic_fact_auditory + - Listen to arithmetic fact + * - arithmetic_fact_visual + - Read arithmetic fact + * - arithmetic_principle_auditory + - Listen to arithmetic principle + * - arithmetic_principle_visual + - Read arithmetic principle + * - colorlessg_auditory + - Jabberwocky sentence presented as auditory stimulus + * - colorlessg_visual + - Jabberwocky sentence presented as visual stimulus + * - context_auditory + - Beep sound indicating that the following stimuli will be audio + * - context_visual + - Red cross indicating that the following stimuli will be visual + * - general_auditory + - Listen to sentence + * - general_visual + - Read sentence + * - geometry_fact_auditory + - Listen to geometric fact + * - geometry_fact_visual + - Read geometric fact + * - theory_of_mind_auditory + - Listen to false-belief tale + * - theory_of_mind_visual + - Read false-belief tale + * - wordlist_auditory + - Listen to word list + * - wordlist_visual + - Read word list + +.. dropdown:: Contrasts for MathLanguage + :name: contMathLanguage + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - arithmetic_fact-othermath + - arithmetic fact vs other maths + * - arithmetic_fact_auditory + - listen to arithmetic fact + * - arithmetic_fact_visual + - read arithmetic fact + * - arithmetic_principle-othermath + - arithmetic principle vs other maths + * - arithmetic_principle_auditory + - listen to arithmetic principle + * - arithmetic_principle_visual + - read to arithmetic principle + * - auditory-visual + - list to vs read instruction + * - colorlessg-wordlist + - jabberwocky vs word list + * - colorlessg_auditory + - auditory jabberwocky sentence parsing + * - colorlessg_visual + - visual jabberwocky sentence parsing + * - context-general + - cue vs language statement + * - context-theory_of_mind + - cue vs false belief + * - context_auditory + - audio cue + * - context_visual + - visual cue + * - general-colorlessg + - listen to sentence vs jabberwocky + * - general_auditory + - listen to sentence + * - general_visual + - read sentence + * - geometry-othermath + - geometry vs other maths + * - geometry_fact_auditory + - listen to geometric fact + * - geometry_fact_visual + - read geometric fact + * - math-nonmath + - math vs others + * - nonmath-math + - others vs math + * - theory_of_mind-context + - false belief vs cue + * - theory_of_mind-general + - false belief vs general statement + * - theory_of_mind_and_context-general + - false belief and cue vs general statement + * - theory_of_mind_auditory + - auditory false-belief tale + * - theory_of_mind_visual + - read false-belief tale + * - visual-auditory + - read vs to listen to instruction + * - wordlist_auditory + - listen to word list + * - wordlist_visual + - read word list + +SpatialNavigation +----------------- + +.. container:: tags + + :bdg-primary:`visual_search` :bdg-info:`spatial_working_memory` :bdg-light:`navigation` :bdg-info:`spatial_memory` :bdg-light:`spatial_localization` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Vizard 6 + - Response device: Five-button ergonomic pad (current designs, package 932 with pyka hhsc-1x5-n4) + + - :octicon:`mark-github;1em;` `Repository `__ + +This protocol, an adaptation from the one used in (`Diersch et al., 2021 `__), was originally designed to capture the effects of spatial encoding and orientation learning in different age groups. The task demands subjects to navigate and orientate themselves in a complex virtual environment that resembled a typical German historic city center, consisting of town houses, shops and restaurant. There are three parts of this task: introduction (outside of the scanner), encoding (in scanner) and retrieval (in scanner). Before entering the scanner, the participants went through an introduction phase, during which they had the freedom to navigate the virtual environment with the objective of collecting eight red balls scattered throughout various streets of the virtual city. During this part, the participants could familiarize themselves with the different buildings and learn the location of the two target buildings: Town Hall and Church. After they collect all the red balls, a short training of the main task was performed to ensure the correct understanding of the instructions. + +Then, participants went to the scanner. The task began with the encoding phase. 
During this period, the participant had to passively watch the camera move from one target building to the other, in such a way that every street of the virtual environment is passed through in every direction possible. Participants were instructed to pay close attention to the spatial layout of the virtual environment and the location of the target landmarks. Passive transportation instead of self-controlled traveling was chosen to ensure that every participant experienced the virtual environment for the same amount of time. After the encoding phase, the retrieval phase started, which consisted of 8 experimental trials and 4 control trials per run. In each trial, the participant was positioned near an intersection within the virtual environment, which was enveloped in a dense fog, limiting visibility. Subsequently, the camera automatically approached the intersection and centered itself. The participant’s task was to indicate the direction of the target building, which was displayed as a miniature picture at the bottom of the screen. Control and experimental trials were identical, but during control trials the participant had to point to one of the buildings of the intersection that had been colored in blue instead of the target building. All of the runs, except the first one, began with the encoding phase, followed by the retrieval phase. In the initial run, a control trial of the retrieval phase preceded the standard design of the encoding phase followed by the retrieval phase. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for SpatialNavigation + :name: condSpatialNavigation + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - control + - Camera approaches intersection in an control trial + * - encoding_phase + - Encode location of key building + * - experimental + - Camera approaches intersection in an experimental trial + * - intersection + - Camera approaches intersection during encoding phase + * - navigation + - Camera navigates through the streets during encoding phase + * - pointing_control + - Participant rotates camera to point to blue building in control trial + * - pointing_experimental + - Participant rotates camera to point to key building in experimental trial + +.. dropdown:: Contrasts for SpatialNavigation + :name: contSpatialNavigation + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - control + - spatial navigation + * - experimental + - spatial navigation + * - experimental-control + - spatial navigation + * - experimental-intersection + - spatial navigation + * - intersection + - spatial localization + * - navigation + - spatial navigation + * - pointing_control + - pointing a landmark + * - pointing_experimental + - pointing a landmark + * - retrieval + - retrieving a landmark + +GoodBadUgly +----------- + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 2.7) +The GoodBadUgly task was adapted from the study by (`Mantini et al., 2012 `__), which was dedicated to investigate the correspondence between monkey and human brains using naturalistic stimuli. The task relies on watching - viewing and listening - the whole movie "The Good, the Bad and the Ugly" by Sergio Leone. For IBC, the French-dubbed version "Le Bon, la Brute et le Truand" was presented. 
The original 177-minute movie was cut into approximately 10-minute segments to match the segment length of the original study, which presented only three 10-minute segments from the middle of the movie. This resulted in a total of 18 segments (the last segment being only 4.5 minutes long). This task was performed during three acquisition sessions with seven segments each, one segment per run. The first three segments were repeated during the final acquisition after the entire movie had been completed. **Note:** there was some lag between the onset of each run and the initiation of the stimuli (movie), which might vary between runs and subjects. This lag should probably be considered when analyzing the data. Find more details in the section :ref:`Lags in GoodBadUgly movie`. + + +EmoMem +------ + +.. container:: tags + + :bdg-primary:`visual_cue` :bdg-primary:`visual_perception` :bdg-danger:`negative_emotion` :bdg-danger:`positive_emotion` :bdg-light:`imagination` + +.. admonition:: Implementation + :class: seealso + + - Software: Octave 4.4 + Psychtoolbox 3.0 + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. The adjustments concerned the translation of all stimuli and instructions into french, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The **Emotional Memory** task was designed to provide an assessment of implicit and explicit memory, and how it is affected by emotional valence. At the IBC we only conducted the encoding part of the task the Study phase as mentioned in (`Shafto et al., 2014 `__) but not the Test phase that happened outside the scanner in the original study. In each trial, participants were presented with a background picture for 2 seconds, followed by a foreground picture of an object superimposed on it. Participants were instructed to imagine a "story" linking the background and foreground picture, and after an 8-second presentation, the next trial began. The manipulation of emotional valence exclusively affected the background image, which could be negative, neutral, or positive. Participants were asked to indicate the moment they thought of a story or a connection between the object and the background image by pressing a button. In all, 120 trials were presented over 2 runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for EmoMem + :name: condEmoMem + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - negative_image + - Negative background image + * - neutral_image + - Neutral background image + * - object + - Neutral object + * - positive_image + - Positive background image + +.. dropdown:: Contrasts for EmoMem + :name: contEmoMem + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - negative-neutral_image + - negative vs neutral image + * - negative_image + - viewing a negative image + * - neutral_image + - viewing a neutral image + * - object + - foreground object and imagination task + * - positive-neutral_image + - positive vs neutral image + * - positive_image + - viewing a positive image + +EmoReco +------- + +.. container:: tags + + :bdg-danger:`emotional_expression` :bdg-danger:`negative_emotion` :bdg-light:`gender_perception` :bdg-primary:`face_perception` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. The adjustments concerned the translation of all stimuli and instructions into french, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The **Emotion Recognition** task compares brain activity when observing angry versus neutral expressions, and assesses how individuals differ in how they regulate responses to negative emotional expressions (`Shafto et al., 2014 `__). The expressions were presented on female and male faces (15 each), and each face had an angry and a neutral expression version. Emotions were presented in blocks of angry and neutral, with equal numbers of female and male faces in each block. In each trial, participants were asked to report the gender of the face by pressing the corresponding button. There were 12 blocks of each emotion and each block consisted of 5 trials. In all, 60 trials were presented in each of the 2 runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for EmoReco + :name: condEmoReco + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - angry_female + - Angry emotion on female face + * - angry_male + - Angry emotion on male face + * - neutral_female + - Neutral emotion on female face + * - neutral_male + - Neutral emotion on male face + +.. dropdown:: Contrasts for EmoReco + :name: contEmoReco + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - angry + - angry face perception + * - angry-neutral + - angry vs neutral face perception + * - angry_female + - angry female face perception + * - angry_male + - angry male face perception + * - female-male + - female vs male face perception + * - male-female + - male vs female face + * - neutral + - neutral face perception + * - neutral-angry + - neutral vs angry face perception + * - neutral_female + - neutral female face perception + * - neutral_male + - neutral male face perception + +StopNogo +-------- + +.. container:: tags + + :bdg-light:`proactive_control` :bdg-primary:`shape_recognition` :bdg-primary:`shape_perception` + +.. 
admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. The adjustments concerned the translation of all stimuli and instructions into french, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The StopNogo task assesses systems involved in action restraint and action cancellation by randomly interleaving *Go*, *Stop* and *No-Go* trials (`Shafto et al., 2014 `__). On *Go* trials, participants viewed a black arrow pointing left or right for 1000 ms, and indicated the direction of the arrow by pressing left/right buttons with their right hand. On *Stop* trials, the black arrow changed color (from black to red), after a short variable stop-signal delay. Participants were instructed that to not respond to the red arrow, so stop signal trials required canceling the initial response to the black arrow. The Stop-Signal delay varied trial-to-trial in steps of 50 ms, and a staircase procedure was used to maintain a performance level of 66% successful inhibition. Finally, in *No-Go* trials, the arrow was colored in red since the start of the trial (stop-signal delay of 0) and participants were required to make no response. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for StopNogo + :name: condStopNogo + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - go + - Arrow stays black; press button corresponding to arrow direction + * - nogo + - Arrow starts out red so do not press button + * - successful_stop + - Arrow starts out black but turns red; motor response inhibited + * - unsuccessful_stop + - Arrow starts out black but turns red; motor response not inhibited + +.. dropdown:: Contrasts for StopNogo + :name: contStopNogo + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - go + - shape recognition + * - nogo + - no response + * - nogo-go + - response inhibition + * - successful+nogo-unsuccessful + - failed to inhibit response + * - successful_stop + - shape recognition, stopped response + * - unsuccessful-successful_stop + - effect of failed inhibition + * - unsuccessful_stop + - shape recognition, failed stopped response + +Catell +------ + +.. container:: tags + + :bdg-primary:`visual_form_discrimination` :bdg-light:`oddball_detection` + +.. admonition:: Implementation + :class: seealso + + - Software: Octave 4.4 + Psychtoolbox 3.0 + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. 
The adjustments concerned the translation of all stimuli and instructions into french, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The Catell task was used to provide a measure of neural activity underpinning fluid intelligence (`Shafto et al., 2014 `__). On each trial, participants were presented with 4 images and had to identify the "odd one out". While some trials presented easily identifiable differences between the oddball and the other images, others were more challenging, requiring participants to detect abstract patterns to identify the oddball. Participants completed alternating blocks of easy and difficult trials, each lasting 30 seconds. In total, they performed four blocks of easy problems and four blocks of difficult problems. In each trial, a stimulus appeared on the screen and remained until the participant responded, with the block automatically ending after 30 seconds and the next block beginning immediately. Participants were encouraged to take as much time as needed and were advised to respond only when confident in their answers. This design led to variable number of trials per block among individuals, while maintaining a consistent duration for each type of problem (easy and hard). + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Catell + :name: condCatell + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - easy + - Easy oddball trial where the non-oddball images are similar and very different from the oddball + * - hard + - Difficult oddball trial where all images are similar + +.. dropdown:: Contrasts for Catell + :name: contCatell + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - easy + - easy oddball task + * - hard + - hard oddball task + * - hard-easy + - easy vs hard oddball task + +FingerTapping +------------- + +.. container:: tags + + :bdg-warning:`motor_control` :bdg-warning:`motor_planning` + +.. admonition:: Implementation + :class: seealso + + - Software: Octave 4.4 + Psychtoolbox 3.0 + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. The adjustments concerned the translation of all stimuli and instructions into french, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The FingerTapping task studied executive control and action decisions in aging and neurodegenerative diseases (`Shafto et al., 2014 `__). Participants were presented with an image of a right hand and were instructed to press a button with one of their four right hand fingers in response to a cue. 
The cue was either a *specified* cue, in which a single opaque circle indicated which finger to press, or a *chosen* cue, in which 3 circles appeared opaque, indicating that participants had to choose one of the 3 corresponding fingers to press. Cues were presented for 1 second with a stimulus onset asynchrony of 2.5 seconds, and were pseudorandomly ordered so that participants did not see four or more responses of the same condition (action selection, specified or null) in a row. The task included 40 specified trials (10 for each finger) and 40 chosen trials, interspersed with 40 blank trials in which no cue was presented. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for FingerTapping + :name: condFingerTapping + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - chosen + - Participant chooses 1 out of 3 highlighted fingers to tap + * - null + - No finger tap + * - specified + - Finger to tap is highlighted + +.. dropdown:: Contrasts for FingerTapping + :name: contFingerTapping + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - chosen + - uncued finger tapping + * - chosen-null + - uncued vs inhibited finger tapping + * - chosen-specified + - uncued vs cued finger tapping + * - null + - inhibited finger tapping + * - specified + - cued finger tapping + * - specified-null + - cued vs inhibited finger tapping + +VSTMC +----- + +.. container:: tags + + :bdg-info:`spatial_working_memory` :bdg-primary:`visual_attention` + +.. admonition:: Implementation + :class: seealso + + - Software: Octave 4.4 + Psychtoolbox 3.0 + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task is a part of the CamCAN (`Cambridge Centre for Ageing and Neuroscience `__) battery, designed to understand how individuals can best retain cognitive abilities into old age. The adjustments concerned the translation of all stimuli and instructions into French, replacing Matlab functions with Octave functions as needed, and eliminating the use of a custom Matlab toolbox `mrisync `__ that was used to interface with the MRI Scanner (3T Siemens Prisma) over a National Instruments card. All modifications were done taking care not to alter the psychological state that the original tasks were designed to capture. The **Visual Short-Term Memory** task was designed to assess the neural processes underlying visual short-term memory. In each trial, participants saw three arrays of colored dots: one red, one yellow, and one blue. The dot displays were presented in rapid succession, each beginning with a 250 ms fixation period followed by a 500 ms presentation of the dot display. To manipulate set size, one, two, or three of the dot displays moved in a single direction, which had to be remembered. The remaining displays rotated around a central axis and served as distractors, which had to be ignored. After the presentation of the third display, an 8-second delay followed, during which participants had to remember the direction(s) of motion for the non-rotating dots. Subsequently, the probe display appeared, with a colored circle indicating which dot display to recall (red, yellow, or blue). Within the circle, there was a pointer that had to be adjusted to indicate the direction in which the target dot display had been moving. Participants were given 5 seconds to adjust the pointer to match the direction of the to-be-remembered dot display. On 90% of trials, the probed movements were in one of three directions: 7, 127, or 247 degrees.
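The trial timeline described above can be summarized in a few lines (illustrative values and hypothetical helper names; this is not the CamCAN or IBC task code):

.. code-block:: python

   # Rough timeline of a single VSTMC trial (illustration only)
   FIX_DUR, DOTS_DUR = 0.25, 0.5        # per dot display, in seconds
   DELAY_DUR, PROBE_MAX_DUR = 8.0, 5.0

   def vstmc_trial_timeline():
       """Return (onset, duration, label) tuples for one trial."""
       events, t = [], 0.0
       for colour in ("red", "yellow", "blue"):   # three successive displays
           events.append((t, FIX_DUR, "fixation"))
           t += FIX_DUR
           events.append((t, DOTS_DUR, f"dots_{colour}"))
           t += DOTS_DUR
       events.append((t, DELAY_DUR, "delay"))
       t += DELAY_DUR
       events.append((t, PROBE_MAX_DUR, "probe"))
       return events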
The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for VSTMC + :name: condVSTMC + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - resp_load1 + - Response period of stim_load1 trials + * - resp_load2 + - Response period of stim_load2 trials + * - resp_load3 + - Response period of stim_load3 trials + * - stim_load1 + - Dots in only one colour move coherently in a given direction + * - stim_load2 + - Dots in two colours move coherently in two different directions + * - stim_load3 + - Dots in all three colours move coherently in 3 different directions + +.. dropdown:: Contrasts for VSTMC + :name: contVSTMC + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - resp + - response to motion + * - resp_load1 + - response to motion direction of one set of points + * - resp_load2 + - response to motion direction of two sets of points + * - resp_load3 + - response to motion direction of three sets of points + * - resp_load3-load1 + - difference in response to three vs one sets of points + * - stim + - attending to sets of points + * - stim_load1 + - attending to one set of points + * - stim_load2 + - attending to two sets of points + * - stim_load3 + - attending to three sets of points + * - stim_load3-load1 + - difference in attending to motion of three vs one sets of points + +RewProc +------- + +.. container:: tags + + :bdg-dark:`reward_valuation` :bdg-primary:`upper-right_vision` :bdg-dark:`reward_processing` :bdg-dark:`loss_aversion` :bdg-light:`color_perception` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3 (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +The **Reward Processing** protocol was adapted from `O'Doherty et al., 2001 `__ and `O'Doherty et al., 2003 `__, which aimed at discerning the role of the orbitofrontal cortex (OFC) using a similar emotion-related visual reversal-learning task in which choice of the correct stimulus led to a probabilistically determined "monetary" reward and choice of the incorrect stimulus led to a monetary loss. + +In each trial of a run of this protocol, two unfamiliar and easily discriminable fractal patterns were displayed on a gray background, positioned to the left and right of a central fixation cross. At the beginning of the task, one of these two patterns was arbitrarily designated as "correct" and the other as "incorrect". The task for the participants was to select one of these two patterns. Selecting the correct pattern led to a monetary gain with a 70% probability, and a monetary loss with a 30% probability. Selecting the incorrect pattern led to a monetary gain with a 30% probability and a monetary loss with a 70% probability (reversed gain-loss probability contingencies). After selecting either pattern, a black box appeared around the chosen pattern, followed by feedback indicating the amount of symbolic money (either 20 or 10 units) that was gained or lost in that trial. The probability of receiving either 10 or 20 units was equal. Furthermore, once the participant had selected the correct pattern a criterion number of times (5 consecutive selections), reversals of the gain-loss probability contingencies were triggered stochastically (a Poisson-like process): on any post-criterion trial, there was a 25% probability that the gain-loss contingencies reversed.
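The contingency and reversal rules just described can be condensed into a short sketch (hypothetical helper names and structure; the actual implementation lives in the task repository):

.. code-block:: python

   import random

   # Minimal sketch of the RewProc contingencies (not the actual task code)
   P_GAIN_IF_CORRECT = 0.7   # correct pattern: 70% gain / 30% loss
   P_REVERSAL = 0.25         # per post-criterion trial
   CRITERION = 5             # consecutive correct selections required

   def feedback(chose_correct):
       """Return the signed amount (+/-10 or +/-20 units) for one selection."""
       p_gain = P_GAIN_IF_CORRECT if chose_correct else 1 - P_GAIN_IF_CORRECT
       sign = 1 if random.random() < p_gain else -1
       return sign * random.choice((10, 20))   # 10 and 20 units equiprobable

   def contingencies_reverse(n_consecutive_correct):
       """Once the criterion is met, reverse with 25% probability per trial."""
       return n_consecutive_correct >= CRITERION and random.random() < P_REVERSAL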
The data was acquired in 2 runs during one scanning session. Each run comprised 85 trials. The timing of trial events in the IBC implementation of the task differed from that in the two aforementioned studies. This adjustment was made after a discussion with the authors, who believed that the timing in the final IBC-implementation version was more appropriate for achieving adequately separated events to minimize temporal correlations while maintaining a reasonable total trial length. Specifically, the pre-fixation cross was displayed for a duration ranging from 500 to 1500 ms. The stimuli remained on the screen for less than 3000 ms for participant selection, and the outcome feedback was presented with a 1750 ms delay, lasting for 1750 ms. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for RewProc + :name: condRewProc + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - green + - Subject selected the green pattern + * - left + - Selected pattern was on the left side of the screen + * - minus_10 + - Lost 10 units of reward as a result of the selection + * - minus_20 + - Lost 20 units of reward as a result of the selection + * - plus_10 + - Gained 10 units of reward as a result of the selection + * - plus_20 + - Gained 20 units of reward as a result of the selection + * - purple + - Subject selected the purple pattern + * - right + - Selected pattern was on the right side of the screen + * - stay + - Selected pattern was the same as the one selected in the previous trial + * - switch + - Selected pattern was different from the one selected in the previous trial + +.. dropdown:: Contrasts for RewProc + :name: contRewProc + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - gain + - gained 20 or 10 units of reward + * - gain-loss + - gained vs lost 20 or 10 units of reward + * - green-purple + - green vs purple pattern selected + * - left-right + - selected pattern on the left vs right side + * - loss + - lost 20 or 10 units of reward + * - loss-gain + - lost vs gained 20 or 10 units of reward + * - minus_10 + - lost 10 units of reward + * - minus_20 + - lost 20 units of reward + * - plus_10 + - gained 10 units of reward + * - plus_20 + - gained 20 units of reward + * - purple-green + - purple vs green pattern selected + * - right-left + - selected pattern on the right vs left side + * - stay + - selected the same pattern as in the previous trial + * - stay-switch + - selected the same vs a different pattern + * - stim + - appearance of the cue images + * - switch + - selected a different pattern than in the previous trial + * - switch-stay + - selected a different vs the same pattern + +NARPS +----- + +.. container:: tags + + :bdg-dark:`reward_anticipation` :bdg-dark:`reward_valuation` :bdg-dark:`reward_processing` :bdg-dark:`loss_aversion` :bdg-dark:`risk_processing`
.. admonition:: Implementation + :class: seealso + + - Software: Psychtoolbox-3 (Octave 5.2.0) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +This protocol is more commonly known as the mixed gambles task and was adapted from the **Neuroimaging Analysis Replication and Prediction Study** (NARPS) (`Botvinik-Nezer et al., 2019 `__), which aimed to estimate the variability of neuroscientific results across analysis teams. The mixed gambles task itself originates from (`Tom et al., 2007 `__), which studied the neural basis of loss aversion, the phenomenon whereby people tend to be more sensitive to losses than to equal-sized gains. That study investigated whether potential losses elicit negative emotions that then drive loss aversion, or whether the same neural systems that encode subjective value respond asymmetrically to losses compared to gains. + +In each trial, participants were presented with a mixed gamble in which they had a 50% chance of either gaining one amount of symbolic money or losing another amount. The possible gains and losses both ranged between 5 and 20 units (equal range condition), in increments of 1 unit, and all 256 possible combinations of gains and losses were presented to each subject in the same sequence. The stimulus consisted of a circle presented on a gray screen and divided into two halves: on one side the gain amount was presented in green with a plus (+) sign before the number, and on the other side the loss amount was presented in red with a minus (-) sign before the number. Subjects were then asked to decide whether or not they would like to accept the gambles presented to them, with four possible responses for each gamble: strongly accept, weakly accept, weakly reject or strongly reject.
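The equal-range design described above implies a fixed grid of gain/loss pairs; the sketch below simply enumerates it (an illustration only, and the loss-aversion weighting shown is an assumption, not part of the IBC documentation):

.. code-block:: python

   from itertools import product

   # Equal-range condition: gains and losses each span 5-20 units in steps of 1,
   # giving 16 x 16 = 256 unique (gain, loss) pairs per subject.
   gambles = list(product(range(5, 21), range(5, 21)))
   assert len(gambles) == 256

   # Hypothetical behavioural summary often fitted to such data (assumption):
   # accept the gamble when the loss-aversion-weighted value is positive.
   def subjective_value(gain, loss, loss_aversion=2.0):
       return 0.5 * gain - 0.5 * loss_aversion * loss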
The data was acquired in four runs during one scanning session. Each run comprised 64 trials. The gamble was presented on the screen until the participant responded or four seconds had passed, followed by a grey screen until the onset of the next trial. In the aforementioned NARPS study, the same amount of data was also acquired for an equal indifference condition, where the possible gains ranged between 10 and 40 units while losses ranged between 5 and 20 units. This was not done for the IBC implementation, as no significant differences were observed between the two task designs in the NARPS study. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for NARPS + :name: condNARPS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - gain + - Significant parametric increase in BOLD signal to increasing potential gains + * - loss + - Significant parametric decrease in BOLD signal to increasing potential losses + * - stim + - Mixed gamble stimulus with given units of potential gain and loss (amounts could vary between 5 and 20) + * - strongly_accept + - Subject accepted the gamble with high confidence + * - strongly_reject + - Subject rejected the gamble with high confidence + * - weakly_accept + - Subject accepted the gamble with low confidence + * - weakly_reject + - Subject rejected the gamble with low confidence + +.. dropdown:: Contrasts for NARPS + :name: contNARPS + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - accept-reject + - gambles accepted vs gambles rejected + * - gain + - potential gains during stim events + * - loss + - potential losses during stim events + * - reject-accept + - gambles rejected vs gambles accepted + * - strongly_accept + - accept the gamble with high confidence + * - strongly_reject + - reject the gamble with high confidence + * - weakly_accept + - accept the gamble with low confidence + * - weakly_reject + - reject the gamble with low confidence + +FaceBody +-------- + +.. container:: tags + + :bdg-primary:`visual_number_recognition` :bdg-primary:`visual_letter_recognition` :bdg-primary:`visual_place_recognition` :bdg-light:`updating` :bdg-primary:`face_maintenance` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychtoolbox-3 (Octave 5.2.0) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +This protocol was adapted from (`Stigliani A 2015 `__), where it was used to define category-selective cortical regions that respond preferentially to different categories. A detailed description and code for the original protocol are available `here `__. In the IBC implementation, participants were presented with images of the following categories: faces, places, bodies, objects and characters. Each of the five stimulus categories was associated with two related subcategories with 144 images per subcategory, see `conditions table `__. The protocol used a mini-block design in which 12 stimuli of the same subcategory were presented in each block. The sequence of the blocks was randomized over the ten subcategories and a blank baseline condition, and each subject was presented with the same sequence.
To ensure that the subjects remained alert throughout the experiment, they were asked to press a button when an image is repeated as a mirrored image (flipped 1-back task). Data were acquired in four runs during one scanning session. Every run comprised 76 blocks, with each block containing 12 images displayed for a total duration of 6 seconds (500 ms per image). + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for FaceBody + :name: condFaceBody + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - bodies_body + - Images of body parts (category) with full bodies without faces (subcategory) + * - bodies_limb + - Images of body parts (category) with just limbs (subcategory) + * - characters_number + - Images of printed characters (category) with just numbers (subcategory) + * - characters_word + - Images of printed characters (category) with just words (subcategory) + * - faces_adult + - Images of faces (category) of adults (subcategory) + * - faces_child + - Images of faces (category) of children (subcategory) + * - objects_car + - Images of objects (category) with just cars (subcategory) + * - objects_instrument + - Images of objects (category) with just musical instruments (subcategory) + * - places_corridor + - Images of places (category) with just corridors (subcategory) + * - places_house + - Images of places (category) with just houses (subcategory) + +.. dropdown:: Contrasts for FaceBody + :name: contFaceBody + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - bodies-others + - body image 1-back task vs. rest of categories + * - bodies_body + - body image 1-back task vs. fixation + * - bodies_limb + - body image 1-back task vs. fixation + * - characters-others + - character images 1-back vs. rest of categories + * - characters_number + - character images 1-back vs fixation + * - characters_word + - words images 1-back vs fixation + * - faces-others + - face image 1-back task vs. rest of categories + * - faces_adult + - face image 1-back task vs. fixation + * - faces_child + - face image 1-back task vs. fixation + * - objects-others + - object image 0-back task vs. rest of categories + * - objects_car + - object image 0-back task vs. fixation + * - objects_instrument + - object image 1-back task vs. fixation + * - places-others + - place image 1-back task vs. rest of categories + * - places_corridor + - place image 1-back task vs. fixation + * - places_house + - place image 1-back task vs. fixation + +Scene +----- + +.. container:: tags + + :bdg-light:`spatial_attention` :bdg-primary:`visual_search` :bdg-primary:`visual_scene_perception` :bdg-primary:`upper-right_vision` :bdg-primary:`upper-left_vision` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This protocol was adapted from (`Douglas et al., 2017 `__) and was designed to identify how the brain combines spatial elements to form a coherent percept. To this end, participants judged whether Escher-like scenes were possible or impossible. 56 scenes were designed so that they appeared spatially incoherent when viewed from a particular angle, and were termed *impossible scenes*. 
Possible counterparts were created to each impossible scene, and these were termed *possible scenes*. For comparison, baseline non-scene images were created by scrambling the scenes and matched for low-level visual properties. A partially transparent circle was overlaid at a pseudo-random location on each of the scrambled scenes, such that half of these dots were found on the left and half on the right of the baseline scrambled images. On these scrambled image dot trials, participants indicated the left/right location of the dot. There were easy and hard versions that depended on the transparency of the overlaid circle. The data were acquired in four runs during one scanning session. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Scene + :name: condScene + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - dot_easy_left + - More opaque dot on left + * - dot_easy_right + - More opaque dot on right + * - dot_hard_left + - More transparent dot on left + * - dot_hard_right + - More transparent dot on right + * - scene_impossible_correct + - Impossible scene trial that the subject identified correctly + * - scene_impossible_incorrect + - Impossible scene trial that the subject identified incorrectly + * - scene_possible_correct + - Possible scene trial that the subject identified correctly + * - scene_possible_incorrect + - Possible scene trial that the subject identified incorrectly + +.. dropdown:: Contrasts for Scene + :name: contScene + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - dot_easy_left + - looking for a salience dot in left visual field + * - dot_easy_right + - looking for a salience dot in right visual field + * - dot_hard-easy + - looking for low-salience vs high-salience dot + * - dot_hard_left + - looking for a low-salience dot in left visual field + * - dot_hard_right + - looking for a low-salience dot in right visual field + * - dot_left-right + - looking for a dot in left vs right visual field + * - scene_correct-dot_correct + - assessing scenes vs detecting a dot + * - scene_impossible_correct + - successful identification of an impossible scene + * - scene_impossible_incorrect + - failed identification an impossible scene + * - scene_possible_correct + - successful identification of an possible scene + * - scene_possible_correct-scene_impossible_correct + - successful identification of an possible vs impossible scene + +BreathHolding +------------- + +.. container:: tags + + :bdg-light:`breath_holding` :bdg-light:`self_monitoring` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This task was a part of the Function Biomedical Informatics Research Network (FBIRN) (`Keator et al., 2016 `__) battery of protocols designed to, among other goals, assess the major sources of variation in fMRI studies conducted across scanners, including instrumentation, acquisition protocols, challenge tasks, and analysis methods. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The BreathHolding task was designed to measure vascular response. 
In a block design, the participant alternated between breathing normally for 20 s and holding their breath for 16 s. They were given a warning 2 s before the hold breath signal was given, so they could prepare to hold their breath. This cycle was repeated 10 times. No response was required in this task. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for BreathHolding + :name: condBreathHolding + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - breathe + - Breathe normally + * - get_ready + - Prepare to hold breath + * - hold_breath + - Hold breath + +.. dropdown:: Contrasts for BreathHolding + :name: contBreathHolding + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - breathe + - breathe normally + * - breathe-hold + - breathe normally vs hold breath + * - hold-breathe + - hold breath vs breathe normally + * - hold_breath + - hold breath + +Checkerboard +------------ + +.. container:: tags + + :bdg-light:`preattentive_processing` :bdg-primary:`visual_perception` :bdg-light:`central_fixation` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This task was a part of the Function Biomedical Informatics Research Network (FBIRN) (`Keator et al., 2016 `__) battery of protocols designed to, among other goals, assess the major sources of variation in fMRI studies conducted across scanners, including instrumentation, acquisition protocols, challenge tasks, and analysis methods. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The Checkerboard task is a block design sensorimotor task with alternating 16s long blocks of rest and visual stimulation with a checkerboard stimulus. In the checkerboard block, a checkerboard filling the visual field was presented for a period of 200 ms at random intervals (avg. ISI=762 ms, range: 500-1000 ms), and the subject pressed a button each time the checkerboard appeared on screen. The run starts and ends with fixation blocks, and 11 blocks of checkerboard stimulation are presented. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Checkerboard + :name: condCheckerboard + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - checkerboard + - Checkerboard block + * - fixation + - Fixation block + +.. dropdown:: Contrasts for Checkerboard + :name: contCheckerboard + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - checkerboard + - checkerboard + * - checkerboard-fixation + - checkerboard vs baseline + * - fixation + - period in between checkerboards + +FingerTap +--------- + +.. container:: tags + + :bdg-light:`preattentive_processing` :bdg-light:`central_fixation` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) 
+ - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This task was a part of the Function Biomedical Informatics Research Network (FBIRN) (`Keator et al., 2016 `__) battery of protocols designed to, among other goals, assess the major sources of variation in fMRI studies conducted across scanners, including instrumentation, acquisition protocols, challenge tasks, and analysis methods. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The FingerTap task is a block design reaction time task in which subjects press one of the four keypad buttons when they see a corresponding visual cue ('1' for button1, '2' for button2 and so on). The stimuli appear at 1 s intervals and subjects get 2 s to make their response. The run starts and ends with task blocks, with 4 task blocks per run and 64 trials per task block. The task blocks are interleaved with rest blocks lasting 15 s. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for FingerTap + :name: condFingerTap + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - fingertap + - Press button corresponding to visual stimulus + * - rest + - Rest block + +.. dropdown:: Contrasts for FingerTap + :name: contFingerTap + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - fingertap + - button press in response to a cue + * - fingertap-rest + - button press vs rest + * - rest + - rest period + +ItemRecognition +--------------- + +.. container:: tags + + :bdg-info:`spatial_working_memory` :bdg-primary:`visual_number_recognition` :bdg-light:`numerosity` :bdg-primary:`visual_attention` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: E-Prime 2.0 Professional (Psychological Software Tools, Inc.) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`video;1em;` `See demo `__ + +This task was a part of the Function Biomedical Informatics Research Network (FBIRN) (`Keator et al., 2016 `__) battery of protocols designed to, among other goals, assess the major sources of variation in fMRI studies conducted across scanners, including instrumentation, acquisition protocols, challenge tasks, and analysis methods. All modifications were done taking care to not alter the psychological state that the original tasks were designed to capture. The Item Recognition task is a working memory (WM) task with load 1, 3 and 5. There were four conditions in this task; on three of them, participants were shown series of either one, three or five targets (digits), displayed in red, and were asked to memorize them. They were then presented with probes (also digits) displayed in green, and were required to indicate whether the probe matched one of the targets or not. In the fourth condition, participants were shown a series of arrows and were asked to indicate the direction of the arrows (left or right). This task followed a block-design format with 2 blocks for each of the 3 working memory conditions, along with 2 blocks for the arrow condition. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. 
dropdown:: Conditions for ItemRecognition + :name: condItemRecognition + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - arrow_left + - Leftward pointing arrow + * - arrow_right + - Rightward pointing arrow + * - encode1 + - Encode digit of load 1 blocks + * - encode3 + - Encode digit of load 3 blocks + * - encode5 + - Encode digit of load 5 blocks + * - load1_instr + - Instruction signaling start of load 1 blocks + * - load3_instr + - Instruction signaling start of load 3 blocks + * - load5_instr + - Instruction signaling start of load 5 blocks + * - probe1_mem + - Probe digit that was encoded at the start of load 1 blocks + * - probe1_new + - Probe digit that is new for load 1 blocks + * - probe3_mem + - Probe digit that was encoded at the start of load 3 blocks + * - probe3_new + - Probe digit that is new for load 3 blocks + * - probe5_mem + - Probe digit that was encoded at the start of load 5 blocks + * - probe5_new + - Probe digit that is new for load 5 blocks + +.. dropdown:: Contrasts for ItemRecognition + :name: contItemRecognition + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - arrow_left + - leftward pointing arrow + * - arrow_left-arrow_right + - identifying a left vs right pointing arrow + * - arrow_right + - rightward pointing arrow + * - encode + - encoding 1 3 and 5 items + * - encode1 + - memorize 1 digit + * - encode3 + - memorize 3 digits + * - encode5 + - memorize 5 digits + * - encode5-encode1 + - encoding 5 vs 1 item + * - prob-arrow + - probing digits vs trials of pointing arrows + * - probe1_mem + - probe encoded digit from load 1 + * - probe1_new + - probe new digit from load 1 + * - probe3_mem + - probe encoded digit from load 3 + * - probe3_new + - probe new digit from load 3 + * - probe5_mem + - probe encoded digit from load 5 + * - probe5_mem-probe1_mem + - probing an encoded digit in a load of 5 vs 1 + * - probe5_new + - probe new digit from load 5 + * - probe5_new-probe1_new + - probing a new digit in a load 5 vs 1 + +VisualSearch +------------ + +.. container:: tags + + :bdg-primary:`visual_pattern_recognition` :bdg-primary:`visual_search` :bdg-primary:`visual_form_discrimination` :bdg-info:`working_memory_maintenance` :bdg-info:`visual_working_memory` + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.10.0 (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + + - :octicon:`video;1em;` `See demo `__ + +The **Visual search, Working memory** protocol was adapted from (`Kuo BC et al., 2016 `__). It aimed to elaborate the neurophysiological mechanism underlying the spatially specific activation of sensory codes while searching for a visual or remembered target. A set of eight stimuli items was selected from a set of 100 novel and difficult to verbalize closed shape contours previously developed by (`Endo N et al. 2003 `__) in the original as well as in the IBC implementation of the study. Each run of the protocol involved two kinds of trials - visual search and working memory search. In visual search trials, the participants were first shown an abstract item (sample item) and then they had to search for that item in a set of two or four items (search array). 
In the working memory search trials, the participants were first shown a set of two or four items (memory array) and then they had to tell whether a subsequently shown item (probe item) was present in the previously shown set of items. Thus, in addition to the type of search (visual or working memory) and search response (target present or absent), the array load (two or four items) was also varied in each trial. + +The data was acquired in four runs during one scanning session. Each run comprised forty-eight trials. In the original study, the participants also performed a separate session for a visual localizer task, where they viewed the stimuli passively without making any responses. This session was excluded from the IBC implementation of the protocol. Furthermore, the response period was also increased from 1000 msec to 2000 msec and the stimuli size from 1.72 to 1.80 degrees of visual angle, following the feedback from the pilot sessions. Apart from these changes, the rest of the task design was similar to that of the original study. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for VisualSearch + :name: condVisualSearch + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - delay_vis + - Delay period between sample item and search array in visual search trials + * - delay_wm + - Delay period between memory array and probe item in working memory trials + * - memory_array_four + - Array of four items, with or without the item to search for (probe item) - in working memory trials + * - memory_array_two + - Array of two items, with or without the item to search for (probe item) - in working memory trials + * - probe_item_four_absent + - Item to search for but was absent in memory array of four items - in working memory trials + * - probe_item_four_present + - Item to search for and was present in memory array of four items - in working memory trials + * - probe_item_two_absent + - Item to search for but was absent in memory array of two items - in working memory trials + * - probe_item_two_present + - Item to search for and was present in memory array of two items - in working memory trials + * - response_hit + - Subject responded correctly + * - response_miss + - Subject responded incorrectly + * - sample_item + - Item to search for in an array of two or four items (search array) - in visual search trials + * - search_array_four_absent + - Array of four items, without sample item - in visual search trials + * - search_array_four_present + - Array of four items, with sample item - in visual search trials + * - search_array_two_absent + - Array of two items, without sample item - in visual search trials + * - search_array_two_present + - Array of two items, with sample item - in visual search trials + +.. dropdown:: Contrasts for VisualSearch + :name: contVisualSearch + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - delay_vis + - delay period on visual search + * - delay_vis-delay_wm + - delay period on visual search vs on working memory + * - delay_wm + - delay period on working memory + * - memory_array_four + - array of four items with or without the item to search for + * - memory_array_two + - array of two items with or without the item to search for + * - probe_item + - probing an item absent or present + * - probe_item_absent + - probing an absent item from array of two or four + * - probe_item_absent-probe_item_present + - probing an absent vs present item + * - probe_item_four + - probing an absent or present item from array of four + * - probe_item_four-probe_item_two + - probing an item from an array of four vs two + * - probe_item_four_absent + - probing an absent item from array of four + * - probe_item_four_present + - probing a present item from array of four + * - probe_item_present + - probing a present item from array of two or four + * - probe_item_two + - probing an absent or present item from array of two + * - probe_item_two_absent + - probing an absent item from array of two + * - probe_item_two_present + - probing a present item from array of two + * - response_hit + - subject's correct response + * - response_miss + - subject's incorrect response + * - sample_item + - item to search for in an array of two or four items + * - search_array + - array of four or two items + * - search_array_absent + - array of two or four items without sample item + * - search_array_absent-search_array_present + - array of items without vs with sample item + * - search_array_four + - array of four items with or without the sample item + * - search_array_four-search_array_two + - array of four vs two items + * - search_array_four_absent + - array of four items without sample item + * - search_array_four_present + - array of four items with sample item + * - search_array_present + - array of two or four items with sample item + * - search_array_two + - array of two items with or without the sample item + * - search_array_two_absent + - array of two items without the sample item + * - search_array_two_present + - array of two items with the sample item + +MonkeyKingdom +------------- + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 2.7) + - Audio device: MRConfon MKII + +The movie-watching task Monkey Kingdom (Au royaume des singes in French) was adapted from a study done in Wim Vanduffel’s `Laboratory for Neuro- and Psychophysiology `__ at the University of Leuven, Belgium. This task was dedicated to investigating the correspondence between monkey and human brains using naturalistic stimuli. The task consisted of watching - viewing and listening to - the whole Disney movie "Monkey Kingdom". The original 81-minute movie was cut into 15-minute segments, resulting in a total of 5 segments, each one presented in a separate run. The acquisition of the 5 runs was completed in one session. **Note:** there was some lag between the onset of each run and the initiation of the stimuli (movie), which might vary between runs and subjects. This lag should probably be considered when analyzing the data. Find more details in the section :ref:`Lags in MonkeyKingdom movie`. + + +Color +----- + +.. container:: tags + + :bdg-light:`color_perception` :bdg-info:`working_memory` + +.. 
admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3 (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + +This protocol was adapted from (`McKeefry et al., 1997 `__), that aimed at investigating the position and variability of the color center in the human brain. This protocol used a mini-block design, in which each run consisted of two types of blocks: chromatic and achromatic. The chromatic stimuli consisted of Mondrian patterns: abstract images with no recognizable objects, each image composed of 20 circular blobs of different isoluminant colors. The achromatic stimuli consisted of gray-scaled versions of the described Mondrian patterns. In each block, 12 images of the same type (chromatic or achromatic) were presented. Each block was followed by an inter-block fixation cross that stayed in screen for 5 seconds. Both conditions were equally represented in each run and the same randomized sequence was presented to each subject. To ensure that the subjects remained alert throughout the experiment, they were asked to press a button when an image repeated (1-back task). The data was acquired in four runs during one scanning session. Each run had 36 blocks. The images presented were 16 x 16 degrees of visual angle. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Color + :name: condColor + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - achromatic + - Achromatic Mondrian patterns + * - chromatic + - Chromatic Mondrian patterns + * - response + - Subject's response to 1-back task i.e. when the same color pattern was presented twice consecutively + +.. dropdown:: Contrasts for Color + :name: contColor + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - achromatic + - attending to achromatic mondrian patterns + * - chromatic + - attending to chromatic mondrian patterns + * - chromatic-achromatic + - chromatic vs achromatic mondrian patterns + * - response + - response to repeated mondrian patterns + +Motion +------ + +.. container:: tags + + :bdg-primary:`upper-left_vision` :bdg-light:`color_perception` :bdg-primary:`lower-left_vision` :bdg-primary:`visual_awareness` :bdg-warning:`coherent_motion` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3. (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + +This protocol was adapted from (`Helfrich et al., 2013 `__), that aimed at delineating areas of the visual cortex that responded to coherent motion under controlled attention and fixation. In this protocol, the stimulus was composed of a rectangular random dot pattern with white dots on a dark background. Each run consisted of trials with three different conditions: stationary, coherent motion and incoherent motion. In the coherent motion condition, all dots moved in the same direction, while in the incoherent motion condition, the dots moved independently in various directions. For the two motion conditions, the direction of motion changed every 2 seconds in steps of 60 degrees. 
Consequently, the coherent motion condition was further divided into two types: motion direction changing clockwise and motion direction changing counter-clockwise. Additionally, during the stationary condition (baseline), a static random dot pattern was presented with a limited lifetime of 1000 ms. Apart from the motion conditions, the field of presentation of the stimuli also varied during the experiment. Some stimuli within a run were presented only on the right side, others only on the left side and some on full screen. The stimuli extended 40 degrees in the horizontal and 20 degrees in the vertical direction. The central visual area of 3 x 3 degrees was not stimulated. Each dot (including the fixation dot) moved at 6 degrees/sec. During all the runs the subjects were asked to fixate on the central fixation point, which changed colors at a rate of 2 Hz. The colors were randomly selected from six options: red, yellow, blue, green, magenta and white. To ensure that the subjects remained alert throughout the experiment, they were asked to press a button when this fixation point turned blue. The conditions were counterbalanced and were presented in the same randomized sequence to each subject. The randomized sequence of the changing colors of the fixation point was also the same for each subject. Data were acquired in four runs during one scanning session. Each run consisted of 32 trials, and each trial was followed by an inter-trial fixation cross that stayed on the screen for 2 seconds. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Motion + :name: condMotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - anti + - Trials with direction of coherent motion changing in the anti-clockwise direction + * - clock + - Trials with direction of coherent motion changing in the clockwise direction + * - coherent + - Motion condition when dots were moving coherently in the same direction + * - incoherent + - Motion condition when dots were moving incoherently in random directions + * - left + - Trials where dot pattern was presented only in the left visual field + * - right + - Trials where dot pattern was presented only in the right visual field + * - stationary + - Motion condition when dots stayed stationary but each dot was respawned in a different location after 1 sec + +.. dropdown:: Contrasts for Motion + :name: contMotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - anti + - anti-clockwise motion + * - clock + - motion in clockwise direction + * - clock-anti + - clockwise vs anti-clockwise motion + * - coherent + - dots moving coherently + * - coherent-incoherent + - dots moving coherently vs incoherently + * - coherent-stationary + - dots moving coherently vs staying stationary + * - incoherent + - dots moving incoherently + * - incoherent-stationary + - dots moving incoherently vs staying stationary + * - left-right + - dot pattern in left vs right visual field + * - response + - fixation point turning blue + * - stationary + - stationary dots appearing in different locations + +OptimismBias +------------ + +.. container:: tags + + :bdg-light:`self-reference_effect` :bdg-info:`memory_retrieval` :bdg-info:`emotional_memory` :bdg-light:`episodic_future_thinking` :bdg-light:`episodic_simulation` + +.. 
admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3. (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - :octicon:`mark-github;1em;` `Repository `__ + +This protocol was adapted from (`Sharot et al. 2007 `__), that aimed at examining the neurobiological basis of optimism. Subjects were presented with a series of events as text that described a life episode along with the word “past” or “future”. This cue indicated that subjects had to think of the given event as if it had occurred in the past or might occur in the future. They were instructed to press a button once the memory or projection had begun to form in their mind. They then had to rate the memory or projection based on how emotionally arousing it was (very, a little, or not at all) and judge its valence (whether it was negative or positive). Each event was displayed for 14 seconds on the screen, and they had 2 seconds for each rating task (emotional arousal and valence). In the original study, 80 unique events were presented over 4 runs, whereas for IBC we added a fifth run where the events were picked randomly out of the given 80 and the past and future contingencies were reversed. Each run was 10 minutes long. Trials were labeled based on the ratings received from the subjects: those with high ("very") or medium ("a little") arousal ratings and negative valence were labeled negative, while those with high or medium arousal ratings and positive valence were labeled positive. In case of other combinations, trials were labeled neutral and in absence of either or both responses they were labeled inconclusive. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for OptimismBias + :name: condOptimismBias + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - future_negative + - Future, negative valence and very or a little arousing event + * - future_neutral + - Future, negative or positive valence and not at all arousing event + * - future_positive + - Future, positive valence and very or a little arousing event + * - inconclusive + - Absence of either or both responses + * - past_negative + - Past, negative valence and very or a little arousing event + * - past_neutral + - Past, negative or positive valence and not at all arousing event + * - past_positive + - Past, positive valence and very or a little arousing event + +.. dropdown:: Contrasts for OptimismBias + :name: contOptimismBias + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - all_events + - all events + * - future_positive_vs_negative + - future positive vs negative + * - future_vs_past + - future vs past events + * - interaction + - interaction of (future vs past) and (positive vs negative) + * - optimism_bias + - future negative vs other events + * - past_positive_vs_negative + - past positive vs negative + * - positive_vs_negative + - positive vs negative events + +MovieAomic +---------- + +.. container:: tags + + + +.. admonition:: Implementation + :class: seealso + + - Software: Expyriment 0.9.0 (Python 2.7) + - Audio device: MRConfon MKII + +This passive movie-watching task was part of the Amsterdam Open MRI Collection (AOMIC) battery, described in (`Snoek et al., 2021 `__). 
This movie clip was 11 minutes long and consisted of continuous compilation of 22 natural scenes taken from the movie Koyaanisqatsi (`Reggio G. Koyaanisqatsi, 1982 `__) with music composed by Philip Glass. The resolution was adjusted to subtend 16 degrees of visual angle. According to Snoek et al., the scenes were selected because they broadly sample a set of visual parameters (textures and objects with different sizes and different rates of movement). It is important to note that the focus on variation of visual parameters means that the movie lacks a narrative, and thus may be inappropriate for investigating semantic or other high-level processes. + + +HarririAomic +------------ + +.. container:: tags + + :bdg-primary:`emotional_face_recognition` :bdg-primary:`face_perception` :bdg-primary:`visual_orientation` :bdg-danger:`emotional_expression` :bdg-primary:`shape_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol is a part of the AOMIC (`Amsterdam Open MRI Collection `__) battery and is published in (`Snoek et al., 2021 `__). HarririAomic explored the processes related to (facial) emotion processing. In each trial, participants were shown three images arranged in the form of a triangle, with one image at the top and two at the bottom. Their task was to identify which of the two bottom images matched the top image and respond accordingly. During a shape condition trial, they had to match the shape of the images, specifically whether the oval shape was oriented vertically or horizontally. During an emotion condition trial, they had to match the emotion/facial expression (either fear or anger) in the images. The stimulus disappeared after 4.8 seconds or as soon as the participant responded, and new trial always began 5 seconds after the onset of each trial. There were two runs for this task, with trials presented in a block design alternating shape and emotion blocks. Each condition included four blocks. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for HarririAomic + :name: condHarririAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - emotion + - When the presented trial was that with emotions + * - index_response + - When subject responded with index finger, meaning the image on left matched with image on top + * - middle_response + - When subject responded with middle finger, meaning the image on right matched with image on top + * - shape + - Viewing a shape + +.. dropdown:: Contrasts for HarririAomic + :name: contHarririAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - emotion + - match the facial expression + * - emotion-shape + - match facial expression vs the shape of image + * - index_response + - matching left image to top cue + * - middle_response + - matching right image to top cue + * - shape + - match the shape of images + +FacesAomic +---------- + +.. container:: tags + + :bdg-primary:`facial_expression` :bdg-light:`gender_perception` :bdg-primary:`face_perception` :bdg-danger:`negative_emotion` :bdg-danger:`emotional_expression` + +.. 
admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol is a part of the AOMIC (`Amsterdam Open MRI Collection `__) battery and is published in (`Snoek et al., 2021 `__). FacesAomic explored the processes related to emotional facial perception. The stimuli consisted of videos showing facial expressions of individuals, either male and female, expressing emotions such as pride, contempt, anger, joy, or no expression. As in the original study, each face was also labeled as either Northern European or Mediterranean. For this IBC implementation, the run duration was extended from 4 to 6 minutes, and an extra run was added by including additional video stimuli from the `Amsterdam Dynamic Facial Expression Set (ADFES) `__ (`Schalk et al., 2011 `__) . Videos featured both female and male models, and a post-acquisition task was added to control for attention: participants were instructed to try to remember each face and expression combination they had seen, and after each run, they were shown a specific combination and had to indicate whether they had seen it. Each video was 4 seconds long, with 5 seconds of inter-trial interval and 8 videos in each run. Each video was associated with three conditions: emotions, gender and ethnicity; which were counterbalanced within and across runs. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for FacesAomic + :name: condFacesAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - anger + - Video of a face expressing anger + * - contempt + - Video of a face expressing contempt + * - european + - Video with an european ethnicity model expressing some emotion + * - female + - Video with female face expressing some emotion + * - joy + - Video of a face expressing joy + * - male + - Video with male face expressing some emotion + * - mediterranean + - Video with a mediterranean ethnicity model expressing some emotion + * - neutral + - Baseline, when no emotion was expressed + * - pride + - Video of a face expressing pride + +.. dropdown:: Contrasts for FacesAomic + :name: contFacesAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - all-neutral + - attending to expressive vs neutral faces + * - anger + - attending to face expressing anger + * - anger-neutral + - attending to angry vs neutral face + * - contempt + - attending to face expressing contempt + * - contempt-neutral + - attending to contempt vs neutral face + * - european-mediterranean + - attending to european vs mediterranean ethnicity face + * - female-male + - attending to female vs male face + * - joy + - attending to face expressing joy + * - joy-neutral + - attending to joyful vs neutral face + * - male-female + - attending to male vs female face + * - mediterranean-european + - attending to mediterranean vs european ethnicity face + * - neutral + - attending to neutral face + * - pride + - attending to face expressing pride + * - pride-neutral + - attending to pride vs neutral face + +StroopAomic +----------- + +.. 
container:: tags + + :bdg-light:`gender_perception` :bdg-primary:`face_perception` :bdg-light:`conflict_detection` :bdg-primary:`visual_word_recognition` + +.. admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol is a part of the AOMIC (`Amsterdam Open MRI Collection `__) battery and is published in (`Snoek et al., 2021 `__). StroopAomic explored the processes related to cognitive conflict and control. Participants were presented with greyscale images of faces of female and male models with certain words in red overlaid on top. The words were related to gender terms, using the french words for "man", "sir", "woman" and "lady", in either lower or upper case. Their task was to identify whether the image depicted a male or female model while ignoring the overlaid word. Differently from the original study, the face images for this IBC implementation were taken from the stimuli set provided by `Morrison2017 `__, since the original ones were not available. There were two runs of this task, in which each face-word composite stimulus was presented for 0.5 seconds in an event-related design, and was either congruent (same sex of face and word) or incongruent (different sex of face and word). The congruent and incongruent conditions were counterbalanced in each run. In total, 96 composite stimuli were presented. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for StroopAomic + :name: condStroopAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - congruent + - The presented word matches the face shown + * - face_female + - Female face shown + * - face_male + - Male face shown + * - incongruent + - The presented word did not match the face shown + * - word_female + - The presented word corresponds to a female + * - word_male + - The presented word corresponds to a male + +.. dropdown:: Contrasts for StroopAomic + :name: contStroopAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - congruent-incongruent + - word and face matched vs did not match + * - congruent_word_female_face_female + - attending to female face while reading 'female' + * - congruent_word_male_face_male + - attending to male face while reading 'male' + * - face_male-face_female + - male vs female face + * - incongruent-congruent + - word and face did not match vs matched + * - incongruent_word_female_face_male + - attending to male face while reading 'female' + * - incongruent_word_male_face_female + - attending to female face while reading 'male' + * - index-middle + - indicate the face is of male vs of female + * - index_response + - identifying a male face + * - middle-index + - indicate the face is of female vs of male + * - middle_response + - identifying a female face + * - word_male-word_female + - word 'male' vs 'female' + +WMAomic +------- + +.. container:: tags + + :bdg-primary:`visual_orientation` :bdg-info:`visual_working_memory` :bdg-primary:`visual_attention` + +.. 
admonition:: Implemented using proprietary software + :class: seealso + + - Software: Presentation (Version 20.1, Neurobehavioral Systems, Inc., Berkeley, CA) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol is a part of the AOMIC (`Amsterdam Open MRI Collection `__) battery and is published in (`Snoek et al., 2021 `__). **Working Memory Aomic** explored the processes related to visual working memory. Subjects were presented with a set of six white bars arranged in a circle around a fixation cross, where each bar had a random orientation: either 0, 45, 90, or 135 degrees (retention phase). Subsequently, one of these bars reappeared with either the same or a different orientation (test phase). Participants were required to indicate whether the orientation was the same or different (response phase). Each trial lasted around 6 seconds and was assigned to one of three conditions: active change, active no change, or passive. In passive trials, bars were not displayed. Across two runs, there were a total of 8 passive trials, 16 trials each for active change and active no change, and 20 null trials, which functioned as additional inter-stimulus intervals. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for WMAomic + :name: condWMAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - active_change + - The probe had a different orientation than it had in the array + * - active_no_change + - The probe had the same orientation as it had in the array + * - passive + - Passive trials, the bars were not displayed + +.. dropdown:: Contrasts for WMAomic + :name: contWMAomic + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - active-passive + - assess probe orientation vs null event + * - active_change + - probe did not match previous orientation + * - active_change-active_no_change + - probe did not match vs matched orientation + * - active_no_change + - probe matched previous orientation + * - passive + - null event + +LocalizerAbstraction +-------------------- + +.. container:: tags + + :bdg-primary:`vertical_checkerboard` :bdg-primary:`visual_number_recognition` :bdg-primary:`face_perception` :bdg-primary:`visual_place_recognition` :bdg-primary:`visual_face_recognition` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychtoolbox-3 (MATLAB 2021b) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol was adapted from a study conducted by colleagues at NeuroSpin, CEA Saclay (Morfoisse et al.). The goal of the study was to understand the neural representations of real-world things from different semantic categories at various levels of abstraction/rendering; to that end, a dedicated localizer run was needed to identify regions specific to each category before presenting the stimuli at the different levels of abstraction. The localizer differed from the four runs in that the images were from eight different categories - faces, human body, words, nonsense words, numbers, places, objects and checkerboards. Each category in the localizer was presented in a block of 6 seconds, with each image displayed for 100 ms followed by a 200 ms inter-stimulus interval.
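As a quick sanity check of these timings, a 6-second category block at 100 ms per image plus a 200 ms inter-stimulus interval accommodates 20 images. The snippet below is a minimal sketch of that arithmetic in plain Python; the variable names are illustrative and it is not the Psychtoolbox-3 code that actually ran the protocol.

.. code-block:: python

   # Back-of-the-envelope check of the LocalizerAbstraction block timing
   # (illustrative sketch only; the protocol itself ran under Psychtoolbox-3).
   block_duration_ms = 6000   # each category block lasts 6 seconds
   image_duration_ms = 100    # each image is displayed for 100 ms
   isi_duration_ms = 200      # followed by a 200 ms inter-stimulus interval

   images_per_block = block_duration_ms // (image_duration_ms + isi_duration_ms)
   print(images_per_block)    # -> 20 images per 6-second block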
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for LocalizerAbstraction + :name: condLocalizerAbstraction + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - localizer_checkerboards + - Checkerboard images + * - localizer_faces + - Face images + * - localizer_humanbody + - Body images + * - localizer_nonsensewords + - Nonsense word images + * - localizer_numbers + - Number images + * - localizer_objects + - Object images + * - localizer_places + - Place images + * - localizer_words + - Word images + * - response + - Subject's button press when they saw a star + +.. dropdown:: Contrasts for LocalizerAbstraction + :name: contLocalizerAbstraction + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - localizer_checkerboards + - localizer for checkerboards + * - localizer_checkerboards-other + - checkerboards vs other categories + * - localizer_faces + - localizer for human faces + * - localizer_faces-other + - human faces vs other categories + * - localizer_humanbody + - localizer for human bodies + * - localizer_humanbody-other + - human bodies vs other categories + * - localizer_nonsensewords + - localizer for nonsense words + * - localizer_nonsensewords-other + - nonsense words vs other categories + * - localizer_numbers + - localizer for numbers + * - localizer_numbers-other + - numbers vs other categories + * - localizer_objects + - localizer for objects + * - localizer_objects-other + - objects vs other categories + * - localizer_places + - localizer for places + * - localizer_places-other + - places vs other categories + * - localizer_words + - localizer for words + * - localizer_words-other + - words vs other categories + * - response + - response to star image as control + +Abstraction +----------- + +.. container:: tags + + :bdg-primary:`visual_representation` :bdg-primary:`face_perception` :bdg-light:`edge_detection` :bdg-light:`Naturalistic_Scenes` :bdg-primary:`visual_place_recognition` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychtoolbox-3 (MATLAB 2021b) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This protocol was adapted from a study conducted by colleagues at NeuroSpin, CEA Saclay (Morfoisse et al.). They aimed to understand the neural representations of real-world things across different semantic categories at varying levels of abstraction or rendering. Participants were presented with images from six different semantic categories - human body, animals, faces, flora, objects and places - rendered at three different levels: geometry, edges and photos, in ascending order of detail. There were four different pictures from each category. Additionally, to control for attention, five images of a star were intermingled, and participants were instructed to press a button whenever they saw them. Thus, each run consisted of a total of 77 images (6 categories x 4 examples x 3 renderings + 5 star probes). Each image was presented for 300 ms with a variable inter-stimulus duration of 4, 6 or 8 seconds. Participants completed eight runs of approximately 8 minutes each, except for *sub-06* and *sub-11*, who performed the task in a setup of 4 runs lasting 16 minutes each, with the same total number of images presented.
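The run composition and approximate duration quoted above follow directly from these parameters. The sketch below is a simple consistency check in plain Python (illustrative only, not the Psychtoolbox-3 presentation code), assuming the three inter-stimulus durations occur equally often.

.. code-block:: python

   # Consistency check of the Abstraction run structure described above
   # (illustrative sketch; assumes the 4, 6 and 8 s intervals are equally frequent).
   n_categories = 6           # human body, animals, faces, flora, objects, places
   n_exemplars = 4            # four different pictures per category
   n_renderings = 3           # geometry, edges, photos
   n_star_probes = 5          # attention-control star images

   n_images = n_categories * n_exemplars * n_renderings + n_star_probes
   print(n_images)            # -> 77 images per run

   image_duration_s = 0.3             # each image shown for 300 ms
   mean_isi_s = sum((4, 6, 8)) / 3    # variable inter-stimulus interval of 4, 6 or 8 s
   run_minutes = n_images * (image_duration_s + mean_isi_s) / 60
   print(round(run_minutes, 1))       # -> roughly 8 minutes per run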
+ +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Abstraction + :name: condAbstraction + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - animals_bird_edge + - Images of a bird presented with the edge render + * - animals_bird_geometry + - Images of a bird presented with the geometry render + * - animals_bird_photo + - Images of a bird presented with the photo render + * - animals_butterfly_edge + - Images of a butterfly presented with the edge render + * - animals_butterfly_geometry + - Images of a butterfly presented with the geometry render + * - animals_butterfly_photo + - Images of a butterfly presented with the photo render + * - animals_fish_edge + - Images of a fish presented with the edge render + * - animals_fish_geometry + - Images of a fish presented with the geometry render + * - animals_fish_photo + - Images of a fish presented with the photo render + * - animals_giraffe_edge + - Images of a giraffe presented with the edge render + * - animals_giraffe_geometry + - Images of a giraffe presented with the geometry render + * - animals_giraffe_photo + - Images of a giraffe presented with the photo render + * - faces_cat_edge + - Image of a cat face presented with the edge render + * - faces_cat_geometry + - Image of a cat face presented with the geometry render + * - faces_cat_photo + - Image of a cat face presented with the photo render + * - faces_eyes_edge + - Image of eyes presented with the edge render + * - faces_eyes_geometry + - Image of eyes presented with the geometry render + * - faces_eyes_photo + - Image of eyes presented with the photo render + * - faces_face2_edge + - Image of a different face presented with the edge render + * - faces_face2_geometry + - Image of a different face presented with the geometry render + * - faces_face2_photo + - Image of a different face presented with the photo render + * - faces_face_edge + - Image of a face presented with the edge render + * - faces_face_geometry + - Image of a face presented with the geometry render + * - faces_face_photo + - Image of a face presented with the photo render + * - flora_carrot_edge + - Image of a carrot presented with the edge render + * - flora_carrot_geometry + - Image of a carrot presented with the geometry render + * - flora_carrot_photo + - Image of a carrot presented with the photo render + * - flora_cherry_edge + - Image of a cherry presented with the edge render + * - flora_cherry_geometry + - Image of a cherry presented with the geometry render + * - flora_cherry_photo + - Image of a cherry presented with the photo render + * - flora_flower_edge + - Image of a flower presented with the edge render + * - flora_flower_geometry + - Image of a flower presented with the geometry render + * - flora_flower_photo + - Image of a flower presented with the photo render + * - flora_tree_edge + - Image of a tree presented with the edge render + * - flora_tree_geometry + - Image of a tree presented with the geometry render + * - flora_tree_photo + - Image of a tree presented with the photo render + * - humanbody_hand_edge + - Edge rendering of hands + * - humanbody_hand_geometry + - Geometry rendering of hands + * - humanbody_hand_photo + - Photo rendering of hands + * - humanbody_legs_edge + - Edge rendering of legs + * - humanbody_legs_geometry + - Geometry rendering of legs + * - humanbody_legs_photo + - Photo rendering of legs + * - humanbody_standing_edge + - Edge rendering of standing human + * - humanbody_standing_geometry + - Geometry rendering of standing human + * - humanbody_standing_photo + - Photo rendering of standing human + * - humanbody_walking_edge + - Edge rendering of walking human + * - humanbody_walking_geometry + - Geometry rendering of walking human + * - humanbody_walking_photo + - Photo rendering of walking human + * - objects_camera_edge + - Image of a camera presented with the edge render + * - objects_camera_geometry + - Image of a camera presented with the geometry render + * - objects_camera_photo + - Image of a camera presented with the photo render + * - objects_key_edge + - Image of a key presented with the edge render + * - objects_key_geometry + - Image of a key presented with the geometry render + * - objects_key_photo + - Image of a key presented with the photo render + * - objects_truck_edge + - Image of a truck presented with the edge render + * - objects_truck_geometry + - Image of a truck presented with the geometry render + * - objects_truck_photo + - Image of a truck presented with the photo render + * - objects_watch_edge + - Image of a watch presented with the edge render + * - objects_watch_geometry + - Image of a watch presented with the geometry render + * - objects_watch_photo + - Image of a watch presented with the photo render + * - places_house_edge + - Image of a house presented with the edge render + * - places_house_geometry + - Image of a house presented with the geometry render + * - places_house_photo + - Image of a house presented with the photo render + * - places_mountain_edge + - Image of a mountain presented with the edge render + * - places_mountain_geometry + - Image of a mountain presented with the geometry render + * - places_mountain_photo + - Image of a mountain presented with the photo render + * - places_road_edge + - Image of a road presented with the edge render + * - places_road_geometry + - Image of a road presented with the geometry render + * - places_road_photo + - Image of a road presented with the photo render + * - places_windmill_edge + - Image of a windmill presented with the edge render + * - places_windmill_geometry + - Image of a windmill presented with the geometry render + * - places_windmill_photo + - Image of a windmill presented with the photo render + * - response + - Subject's button press when they saw a star + +.. dropdown:: Contrasts for Abstraction + :name: contAbstraction + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - animals-other + - renders of animals vs of rest of categories + * - animals_edge + - edge renders of animals + * - animals_edge-animals_other + - edge vs geometry and photo render of animals + * - animals_geometry + - geometry renders of animals + * - animals_geometry-animals_other + - geometry vs edge and photo render of animals + * - animals_photo + - photos of animals + * - animals_photo-animals_other + - photo vs geometry and edge render of animals + * - edge-other + - edge vs geometry and photo render + * - faces-other + - renders of faces vs of rest of categories + * - faces_edge + - edge renders of human faces + * - faces_edge-faces_other + - edge vs geometry and photo render of faces + * - faces_geometry + - geometry renders of human faces + * - faces_geometry-faces_other + - geometry vs edge and photo render of faces + * - faces_photo + - photos of human faces + * - faces_photo-faces_other + - photo vs geometry and edge render of faces + * - flora-other + - renders of flora vs of rest of categories + * - flora_edge + - edge renders of flora + * - flora_edge-flora_other + - edge vs geometry and photo render of flora + * - flora_geometry + - geometry renders of flora + * - flora_geometry-flora_other + - geometry vs edge and photo render of flora + * - flora_photo + - photos of flora + * - flora_photo-flora_other + - photo vs geometry and edge render of flora + * - geometry-other + - geometry vs edge and photo render + * - humanbody-other + - renders of human bodies vs of rest of categories + * - humanbody_edge + - edge renders of human bodies + * - humanbody_edge-humanbody_other + - edge vs geometry and photo render of human bodies + * - humanbody_geometry + - geometry renders of human bodies + * - humanbody_geometry-humanbody_other + - geometry vs edge and photo render of human bodies + * - humanbody_photo + - photos of human bodies + * - humanbody_photo-humanbody_other + - photo vs geometry and edge render of human bodies + * - objects-other + - renders of objects vs of rest of categories + * - objects_edge + - edge renders of objects + * - objects_edge-objects_other + - edge vs geometry and photo render of objects + * - objects_geometry + - geometry renders of objects + * - objects_geometry-objects_other + - geometry vs edge and photo render of objects + * - objects_photo + - photos of objects + * - objects_photo-objects_other + - photo vs geometry and edge render of objects + * - photo-other + - photo vs geometry and edge render + * - places-other + - renders of places vs of rest of categories + * - places_edge + - edge renders of places + * - places_edge-places_other + - edge vs geometry and photo render of places + * - places_geometry + - geometry renders of places + * - places_geometry-places_other + - geometry vs edge and photo render of places + * - places_photo + - photos of places + * - places_photo-places_other + - photo vs geometry and edge render of places + * - response + - button press to star + +MDTB +---- + +.. container:: tags + + :bdg-primary:`visual_pattern_recognition` :bdg-light:`action_perception` :bdg-light:`theory_of_mind` :bdg-secondary:`combinatorial_semantics` :bdg-info:`visual_working_memory` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3. 
(Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +The **Multi-Domain Task Battery** was adapted from a study conducted by (`King et al., 2019 `__), where they aimed to investigate the functional organization of the cerebellar cortex by running an fMRI study with a collection of more than 20 tasks. The authors made the paradigm's code and parameters openly available for 9 of those tasks at the time `here `__, which allowed us to integrate them into the IBC project. The implementation differed from the usual IBC setup: all 9 tasks were presented within a single run, instead of dedicating a separate run to each task. + +The protocol consisted of a short training session outside the scanner and 4 runs inside the scanner. In every run each task was performed twice in blocks of 35 seconds. At the beginning of each block, the instructions were displayed for 5 seconds to remind subjects of the instructions and expected actions. Immediately after, the task was performed continuously for 30 seconds; each run therefore lasted around 10 minutes and 30 seconds. If the task required a response from the subjects, they received feedback on their performance, given in the form of a green check mark or a red cross, for correct or incorrect answers. At the end of each run, the success rates for each task were displayed, followed by a video of a knot being tied, as part of an attention-control check for the action observation task (described below). + +Following are detailed descriptions for each task: + +**1) Visual search:** Several 'L' shaped characters rotated at different angles were shown on each trial and subjects were asked to search for the standard (correct) orientation and press with their index finger if it was present, or with their middle finger if it was not. On each run, this task was performed twice, and each time there were 12 trials, half of them being true (the correct 'L' shape was present). The order of true and false trials was randomized for each block in each run. + +**2) Action observation:** Videos of knots being tied were displayed along with their name tags, and subjects were asked to remember the knot and its name. Two different knots were presented per run, and at the end of each run, another video of a knot was shown, this time without the name tag. We then asked subjects whether this particular knot had been displayed during the run, and if so, to say its name. Only in run 3 was the knot displayed at the end one that had been presented during the run. + +**3) Flexion - extension:** Alternating cues with the words 'Extension' and 'Flexion' were presented, instructing the participants to extend or flex their toes accordingly. + +**4) Finger sequence:** A sequence of 6 digits from 1 to 4 was displayed and subjects were asked to press the keys corresponding to the numbers in the shown sequence. The mapping went from index being 1 to pinky being 4. Each block consisted of 8 trials and two blocks were presented during each run. The trials could be either simple or complex: the simple trials involved one or two consecutive fingers, and the complex ones involved three or four fingers, not necessarily consecutive. As the subject pressed the buttons, the digits turned green if the correct key was pressed or red if not. At the end of each trial, if all the digits in the sequence were accurately followed, a green check appeared as feedback; if one or more was incorrect, a red cross appeared.
Each trial lasted for 3.5 seconds, if the subject didn't complete the sequence before the end of the trial, it was counted as incorrect and the red cross appeared. + +**5) Theory of mind:** The subject was presented with a short paragraph narrating a story, followed by a related statement. Subjects must decide whether the statement is true based on the initial paragraph by pressing with their index finger, or false by pressing with their middle finger. Four trials in total were performed per run, half of them being true. If the subject answered correctly, a green check appeared, and on the contrary, a red cross appeared. Each trial lasted 14 seconds, if the subject did not reply during that period, the trial was counted as a mistake and the negative feedback appeared. + +**6) 2-back:** Several images were presented, one after another. For each presented image, participants had to press with their index finger if it is the same that was presented 2 images before or with their middle finger if it was not. The trials were divided into easy and hard. The easy trials were the ones where the current image presented was not displayed two images before, and the hard trials were those where it was. There were 12 trials per block, 7 of the easy type and 5 of the hard type. As the rest of tasks, this was performed twice, leading to 24 trials in total per run. Each image was displayed for 2 seconds, followed by the feedback which was once again a green check or a red cross. + +**7) Semantic prediction**: Words from a sentence were shown, one at a time. Subjects must decide whether the last word fits into the sentence or not, by pressing with their index or middle finger, respectively. There were 4 trials per block, leading to 8 trials per run. Each block consisted of 2 'True' and 2 'False' trials, and the order of appearance was randomized. Each trial could be either hard or easy to perform, depending on the ambiguity of the sentence and there were 2 easy and 2 hard trials per block. The subjects received feedback after their response, a green check or a red cross, consistent with the tasks described above. + +**8) Romance movie watching:** A 30 second clip from the 2009 Disney Pixar movie 'Up' was presented without any sound. Subjects were instructed to watch passively. Two such clips were presented on each run, and no clip was repeated across or within the runs. + +**9) Rest:** Short resting-state period, a fixation cross was displayed and subjects were asked to fixate on it and not move. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MDTB + :name: condMDTB + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - 2back_easy + - Easy 2-back trial, it is easy to remember whether the image was shown 2 images ago. + * - 2back_hard + - Hard 2-back trial, it is hard to remember whether the image was shown 2 images ago. 
+ * - action_action + - Watching a pair of hands make a specific knot + * - action_control + - The resulting knot is shown from different angles + * - finger_complex + - Sequence of button presses that is hard to complete (with no consecutive or repeated fingers) + * - finger_simple + - Sequence of button presses that is easy to complete (using consecutive or repeated fingers) + * - flexion_extension + - Continuous flexion and extension of toes + * - search_easy + - It is easy to judge whether there is a right-oriented 'L' shape present in the array + * - search_hard + - It is hard to judge whether there is a right-oriented 'L' shape present in the array + * - semantic_easy + - Easy to decide whether the last word fits in the sentence, natural sequence + * - semantic_hard + - Hard to decide whether the last word fits in the sentence, ambiguous sequence + * - tom_belief + - The statement presented relates to thoughts or beliefs the characters from the paragraph might have + * - tom_photo + - The statement presented relates to facts described in the paragraph + +.. dropdown:: Contrasts for MDTB + :name: contMDTB + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - 2back_easy + - easy 2-back + * - 2back_hard + - hard 2-back + * - 2back_hard-easy + - hard vs easy 2-back + * - action_action + - watching hands make a specific knot + * - action_action-control + - hands making specific knot vs resulting knot + * - action_control + - resulting knot shown from different angles + * - finger_complex + - hard sequence of button presses + * - finger_complex-simple + - hard vs easy button sequence + * - finger_simple + - easy sequence of button presses + * - flexion_extension + - continuous toe flexion-extension + * - search_easy + - easy to look for the right-oriented shape + * - search_hard + - hard to look for the right-oriented shape + * - search_hard-easy + - hard vs easy to find the correct shape + * - semantic_easy + - easy to decide whether the last word fits in a sentence + * - semantic_hard + - hard to decide whether the last word fits in a sentence + * - semantic_hard-easy + - ambiguous vs natural sequence + * - tom_belief + - statement relates to characters' beliefs + * - tom_belief-photo + - statement relates to beliefs vs facts + * - tom_photo + - statement relates to facts from paragraph + +Emotion +------- + +.. container:: tags + + :bdg-primary:`visual_perception` :bdg-danger:`negative_emotion` :bdg-primary:`visual_scene_perception` :bdg-danger:`emotional_self-evaluation` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychtoolbox-3 (MATLAB 2021b) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + +This task was adapted from (`Favre et al., 2021 `__). This protocol aimed to examine emotional processing and the regions engaged in it. The subjects were presented with a series of pictures divided into two categories: neutral and negative images. The scenes depicted were mainly in a social context, for instance people chatting or eating during the neutral block; and people suffering or fighting during the negative block. The task consisted of two runs and a short training session before the acquisition. Each run consisted of 12 blocks of 10 images, alternating between neutral and negative blocks. Every picture was displayed for 2 seconds, and the subjects were instructed to press with their index finger if the scene occurred indoors, either inside a building or a car.
The inter-block interval lasted 2 seconds, in which a fixation cross was shown. In the middle and at the end of the run the subjects were presented with two questions: *How do you feel?* and *How nervous do you feel?*, along with a scale for them to answer, going from *not well* to *extremely well* for the former question and *not nervous* to *extremely nervous* for the latter. The subjects used their index and middle fingers to slide through the scale and had 7 seconds to give their answer. + +The images used for stimuli were extracted from different databases: the International Affective Picture System (IAPS) (`Lang et al., 2008 `__), the Geneva Affective Picture Database (GAPED) (`Dan-Glauser and Scherer, 2011 `__), the Socio-Moral Image Database (SMID) (`Crone et al., 2018 `__), the Complex Affective Scene Set (COMPASS) (`Weierich et al., 2019 `__), the Besançon Affective Picture Set-Adolescents (BAPS-Ado) (`Szymanska et al., 2015 `__) and the EmoMadrid database (`Carretié et al., 2019 `__). The training session was performed inside the scanner before running the experiment, in order to familiarize the subject with the task and the slider used to answer. The training consisted of 3 blocks: neutral, negative and neutral images, followed by the two questions. We therefore had three main conditions for the task: *neutral*, *negative* and *assessment*. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Emotion + :name: condEmotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - negative_image + - Block of negative images + * - neutral_image + - Block of neutral images + * - valence_scale + - Subject’s rating of emotional state + +.. dropdown:: Contrasts for Emotion + :name: contEmotion + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - negative-neutral + - images with negative vs neutral valence + * - negative_image + - images with negative valence + * - neutral_image + - images with neutral valence + * - valence_scale + - assessment of emotional state + +MultiModal +---------- + +.. container:: tags + + :bdg-success:`voice_perception` :bdg-primary:`face_perception` :bdg-primary:`visual_recognition` :bdg-light:`animacy_detection` :bdg-info:`visual_working_memory` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3. (Python 3.8.5) + - Response device: Five-button ergonomic pad (Current Designs, Package 932 with Pyka HHSC-1x5-N4) + + - Audio device: MRConfon MKII + + - Hardware: LabJack-U3, custom-made computer-controlled pneumatic system + +This protocol was derived from work by colleagues at the `Laboratory for Neuro- and Psychophysiology `__ of the KU Leuven Medical School, who aimed to compare evoked responses to the same sensory stimulation across two different cohorts of human and non-human primates. Three categories of stimuli were used: visual, tactile and auditory. Visual stimuli consisted of gray-scale pictures of ten classes: monkey and human faces, monkey and human bodies (without the head), four-legged mammals, birds, man-made objects that looked either like a human or a monkey's body (e.g. guitar or kettle), fruits/vegetables and body-like sculptures. We presented 10 pictures per class, giving a total of 100 images, which were presented superimposed onto a pink noise background that filled the entire display.
Tactile stimuli consisted of compressed air puffs delivered on both left and right side of the subjects' face on three different locations: above the upper lip, around the cheek area or middle lip and beneath the lower lip. The air puffs were delivered using 6 plastic pipes, one to each target location, with an intensity of 0.5 bars, at a distance of approximately 5 mm to the face, without touching it. The plastic pipes were connected to a custom-made computer controlled pneumatic system in the console room. Auditory stimuli consisted of 1-second clips of different natural sounds from six classes: human speech, human no-speech (e.g. baby crying, cough), monkey calls, animal sounds (e.g. horse), tool sounds and musical instruments (e.g. scissors, piano), and sounds from nature (e.g. rain, thunder). There were 10 different sounds per class, thus 60 different sound-clips in total. MR-compatible headphones were used. + +To be congruent with the study from our colleagues, the auditory stimuli needed to be presented during silent periods, meaning no scanner noise, to ensure they were clearly audible and distinguishable (`Erb et al., 2018 `__). To achieve that, the repetition time (TR) for this protocol was modified to 2.6 seconds, during which we had a silence period (no data acquired, no scanner noise) of 1.2 seconds for stimuli presentation and 1.4 seconds of acquisition time (TA). To ensure uniformity across the experiment, all three types of stimuli were presented during the silent period. Due to the change on TR and TA, some parameters were also updated to maintain a good enough spatial-resolution. :ref:`This table ` contains the final set of acquisition parameters used for this protocol. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for MultiModal + :name: condMultiModal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - audio_animal + - Animal sounds different from monkeys used as audio stimulus + * - audio_monkey + - Monkey sounds used as audio stimulus + * - audio_nature + - Nature sounds (i.e. rain) used as audio stimulus + * - audio_silence + - Control condition for both audio and visual stimuli, no sound played and no image displayed + * - audio_speech + - Speech sounds used as audio stimulus + * - audio_tools + - Tools sounds used as audio stimulus + * - audio_voice + - Human sounds (i.e. laugh, cough) used as audio stimulus + * - image_animals + - Image of an animal different from monkeys and birds used as visual stimulus + * - image_birds + - Image of a bird used as visual stimulus + * - image_fruits + - Image of fruits used as visual stimulus + * - image_human_body + - Image of a human body (without the head) used as visual stimulus + * - image_human_face + - Image of a human face used as visual stimulus + * - image_human_object + - Image of an object used by humans (i.e. guitar) used as visual stimulus + * - image_monkey_body + - Image of a monkey body (without the head) used as visual stimulus + * - image_monkey_face + - Image of a monkey face used as visual stimulus + * - image_monkey_object + - Image of an object used by monkeys (i.e. 
drinker) used as visual stimulus + * - image_sculpture + - Image of a sculpture used as visual stimulus + * - tactile_bottom + - Air puffs delivered beneath the lower lip as tactile stimulus + * - tactile_middle + - Air puffs delivered around the middle lip as tactile stimulus + * - tactile_novalve + - Control condition for the tactile stimulus, air is sent to a pair of pipes placed outside the coil, so that it does not touch the subject + * - tactile_top + - Air puffs delivered above the upper lip as tactile stimulus + +.. dropdown:: Contrasts for MultiModal + :name: contMultiModal + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - animate-inanimate + - images of all faces, bodies or animals vs rest of visual stimuli + * - audio + - all audio stimuli + * - audio-control + - all audio stimuli vs silence + * - audio-tactile + - audio vs tactile stimuli + * - audio-visual + - audio vs visual stimuli + * - audio_animal + - animal sounds other than monkey calls + * - audio_monkey + - monkey sounds + * - audio_nature + - sounds made by nature + * - audio_silence + - no visual or audio stimuli + * - audio_speech + - speech sounds + * - audio_tools + - sounds made by tools + * - audio_voice + - sounds made by humans (laugh, cough) + * - body-non_face + - images of human or monkey faces or bodies vs rest of visual stimuli + * - body-other + - images of human or monkey bodies vs rest of visual stimuli + * - face-other + - images of human or monkey faces vs rest of visual stimuli + * - image_animals + - images of animals (excluding monkeys and birds) + * - image_birds + - images of birds + * - image_fruits + - images of fruits + * - image_human_body + - images of head-less human bodies + * - image_human_face + - images of human faces + * - image_human_object + - images of vertical objects + * - image_monkey_body + - images of head-less monkey bodies + * - image_monkey_face + - images of monkey faces + * - image_monkey_object + - images of round objects + * - image_sculpture + - images of sculptures + * - monkey_speech-other + - monkey sounds vs rest of audio stimuli + * - speech+voice-other + - speech or human sounds vs rest of audio stimuli + * - speech-other + - speech sounds vs rest of audio stimuli + * - tactile + - all tactile stimuli + * - tactile-audio + - tactile vs audio stimuli + * - tactile-control + - all tactile stimuli vs no stimuli + * - tactile-visual + - tactile vs visual stimuli + * - tactile_bottom + - air puff beneath the lower lip + * - tactile_middle + - air puff around the middle lip + * - tactile_top + - air puff above the upper lip + * - visual + - all visual stimuli + * - visual-audio + - visual vs audio stimuli + * - visual-control + - all visual stimuli vs pink noise + * - visual-tactile + - visual vs tactile stimuli + +Mario +----- + +.. container:: tags + + :bdg-dark:`reward_processing` :bdg-success:`auditory_feedback` :bdg-primary:`visual_scene_perception` :bdg-dark:`reward_valuation` :bdg-light:`spatial_cognition` + +.. admonition:: Implementation + :class: seealso + + - Software: Psychopy 2021.1.3 (Python 3.8.5) + - Response device: MR-compatible video game controller + + - Audio device: MRConfon MKII + +This task involved a video game protocol in which participants played Super Mario Bros.
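+As a purely illustrative aside, the sketch below shows how a Super Mario Bros. game environment of the kind used here can be instantiated and stepped with the GymRetro package (the actual implementation is described in the next paragraph). The game identifier ``SuperMarioBros-Nes``, the prior ROM import and the random-action loop are assumptions made for the example; this is not the IBC presentation script.
+
+.. code-block:: python
+
+   # Hypothetical sketch: assumes gym-retro is installed and a Super Mario
+   # Bros. ROM was imported beforehand (python -m retro.import <rom_dir>).
+   import retro
+
+   env = retro.make(game="SuperMarioBros-Nes")
+
+   obs = env.reset()
+   done = False
+   while not done:
+       # During the task the action vector reflects the participant's
+       # controller input; a random action stands in for the player here.
+       action = env.action_space.sample()
+       obs, reward, done, info = env.step(action)
+
+   env.close()
+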
We adapted the implementation from our colleagues at the `Courtois-Neuromod `__ project, who used it with their own cohort, based on the premise that video game playing engages various cognitive domains such as constant reward processing, strategic planning, environmental monitoring, and action-taking (`Bellec and Boyle, 2019 `__). Monitoring brain activity during video game play therefore provides an intriguing window into the interaction of these cognitive processes. The Courtois-Neuromod team also designed an MRI-compatible video game controller, which closely resembles the shape and feel of commercial controllers, ensuring a familiar gaming experience. We replicated this controller for the IBC project; for more details, refer to `Harel et al., 2023 `__. This implementation was created using OpenAI's `GymRetro `__ package. + +The game consisted of eight different worlds, each with three levels. Participants were instructed to play freely and complete as many levels as possible within the session, resulting in varying time spent on each level for each participant. None of the participants completed the entire game, but the majority reached the last world. Participants had unlimited lives but were allowed only three attempts to complete a level: after the first and second losses they returned to the last checkpoint reached in the current level, while losing a third time restarted the level and reset the count. This task was conducted over two sessions, each consisting of six runs of 10 minutes. Each session started the game from the beginning, whereas within a session subsequent runs picked up where the previous one left off: for example, if a player was halfway through a level when an acquisition run ended, they resumed from the same point in the next run. + +The conditions for this task are described in `this table `__ and the main contrasts derived from those conditions are described in `this table `__. + +.. dropdown:: Conditions for Mario + :name: condMario + + .. list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Condition + - Description + * - action_jump + - Player jumps by pressing a designated key + * - action_leftrun + - Player runs to the left (backwards), constrained by the current field of view + * - action_leftwalk + - Player walks to the left (backwards), constrained by the current field of view + * - action_rightrun + - Player runs to the right, advancing into the current world and level + * - action_rightwalk + - Player walks to the right, advancing into the current world and level + * - loss_dying + - Player loses size or dies + * - onscreen_enemy + - Enemy appears in the field of view + * - reward_coin + - Player earns coins, either visible or hidden + * - reward_enemykill_impact + - The enemy is defeated by the player in various ways: by smashing the brick underneath the enemy's position, by activating a lethal element directed towards the enemy, or by temporarily gaining the power to eliminate the enemy upon contact + * - reward_enemykill_kick + - Player kicks the enemy, making it fall or die + * - reward_enemykill_stomp + - Player kills the enemy by stomping on it + * - reward_powerup_taken + - Player gains power and size by catching a power-up item (mushroom) + +.. dropdown:: Contrasts for Mario + :name: contMario + + .. 
list-table:: + :header-rows: 1 + :widths: 25 75 + + * - Contrast + - Description + * - action + - jumping, running or walking + * - keypress_jump + - jumping by pressing designated key + * - keypress_left + - go backwards by pressing left key + * - keypress_right + - go forward by pressing right key + * - loss + - losing power or dying + * - onscreen_enemy + - enemy appearance on screen + * - reward + - getting a reward + * - reward-loss + - getting a reward vs losing + * - reward_coin + - getting coins + * - reward_enemykill-others + - getting a reward for killing enemy vs other rewards + * - reward_enemykill_stomp + - killing an enemy by stomp + diff --git a/_sphinx_design_static/design-tabs.js b/_sphinx_design_static/design-tabs.js new file mode 100644 index 0000000..b25bd6a --- /dev/null +++ b/_sphinx_design_static/design-tabs.js @@ -0,0 +1,101 @@ +// @ts-check + +// Extra JS capability for selected tabs to be synced +// The selection is stored in local storage so that it persists across page loads. + +/** + * @type {Record} + */ +let sd_id_to_elements = {}; +const storageKeyPrefix = "sphinx-design-tab-id-"; + +/** + * Create a key for a tab element. + * @param {HTMLElement} el - The tab element. + * @returns {[string, string, string] | null} - The key. + * + */ +function create_key(el) { + let syncId = el.getAttribute("data-sync-id"); + let syncGroup = el.getAttribute("data-sync-group"); + if (!syncId || !syncGroup) return null; + return [syncGroup, syncId, syncGroup + "--" + syncId]; +} + +/** + * Initialize the tab selection. + * + */ +function ready() { + // Find all tabs with sync data + + /** @type {string[]} */ + let groups = []; + + document.querySelectorAll(".sd-tab-label").forEach((label) => { + if (label instanceof HTMLElement) { + let data = create_key(label); + if (data) { + let [group, id, key] = data; + + // add click event listener + // @ts-ignore + label.onclick = onSDLabelClick; + + // store map of key to elements + if (!sd_id_to_elements[key]) { + sd_id_to_elements[key] = []; + } + sd_id_to_elements[key].push(label); + + if (groups.indexOf(group) === -1) { + groups.push(group); + // Check if a specific tab has been selected via URL parameter + const tabParam = new URLSearchParams(window.location.search).get( + group + ); + if (tabParam) { + console.log( + "sphinx-design: Selecting tab id for group '" + + group + + "' from URL parameter: " + + tabParam + ); + window.sessionStorage.setItem(storageKeyPrefix + group, tabParam); + } + } + + // Check is a specific tab has been selected previously + let previousId = window.sessionStorage.getItem( + storageKeyPrefix + group + ); + if (previousId === id) { + // console.log( + // "sphinx-design: Selecting tab from session storage: " + id + // ); + // @ts-ignore + label.previousElementSibling.checked = true; + } + } + } + }); +} + +/** + * Activate other tabs with the same sync id. + * + * @this {HTMLElement} - The element that was clicked. 
+ */ +function onSDLabelClick() { + let data = create_key(this); + if (!data) return; + let [group, id, key] = data; + for (const label of sd_id_to_elements[key]) { + if (label === this) continue; + // @ts-ignore + label.previousElementSibling.checked = true; + } + window.sessionStorage.setItem(storageKeyPrefix + group, id); +} + +document.addEventListener("DOMContentLoaded", ready, false); diff --git a/_sphinx_design_static/sphinx-design.min.css b/_sphinx_design_static/sphinx-design.min.css new file mode 100644 index 0000000..860c36d --- /dev/null +++ b/_sphinx_design_static/sphinx-design.min.css @@ -0,0 +1 @@ +.sd-bg-primary{background-color:var(--sd-color-primary) !important}.sd-bg-text-primary{color:var(--sd-color-primary-text) !important}button.sd-bg-primary:focus,button.sd-bg-primary:hover{background-color:var(--sd-color-primary-highlight) !important}a.sd-bg-primary:focus,a.sd-bg-primary:hover{background-color:var(--sd-color-primary-highlight) !important}.sd-bg-secondary{background-color:var(--sd-color-secondary) !important}.sd-bg-text-secondary{color:var(--sd-color-secondary-text) !important}button.sd-bg-secondary:focus,button.sd-bg-secondary:hover{background-color:var(--sd-color-secondary-highlight) !important}a.sd-bg-secondary:focus,a.sd-bg-secondary:hover{background-color:var(--sd-color-secondary-highlight) !important}.sd-bg-success{background-color:var(--sd-color-success) !important}.sd-bg-text-success{color:var(--sd-color-success-text) !important}button.sd-bg-success:focus,button.sd-bg-success:hover{background-color:var(--sd-color-success-highlight) !important}a.sd-bg-success:focus,a.sd-bg-success:hover{background-color:var(--sd-color-success-highlight) !important}.sd-bg-info{background-color:var(--sd-color-info) !important}.sd-bg-text-info{color:var(--sd-color-info-text) !important}button.sd-bg-info:focus,button.sd-bg-info:hover{background-color:var(--sd-color-info-highlight) !important}a.sd-bg-info:focus,a.sd-bg-info:hover{background-color:var(--sd-color-info-highlight) !important}.sd-bg-warning{background-color:var(--sd-color-warning) !important}.sd-bg-text-warning{color:var(--sd-color-warning-text) !important}button.sd-bg-warning:focus,button.sd-bg-warning:hover{background-color:var(--sd-color-warning-highlight) !important}a.sd-bg-warning:focus,a.sd-bg-warning:hover{background-color:var(--sd-color-warning-highlight) !important}.sd-bg-danger{background-color:var(--sd-color-danger) !important}.sd-bg-text-danger{color:var(--sd-color-danger-text) !important}button.sd-bg-danger:focus,button.sd-bg-danger:hover{background-color:var(--sd-color-danger-highlight) !important}a.sd-bg-danger:focus,a.sd-bg-danger:hover{background-color:var(--sd-color-danger-highlight) !important}.sd-bg-light{background-color:var(--sd-color-light) !important}.sd-bg-text-light{color:var(--sd-color-light-text) !important}button.sd-bg-light:focus,button.sd-bg-light:hover{background-color:var(--sd-color-light-highlight) !important}a.sd-bg-light:focus,a.sd-bg-light:hover{background-color:var(--sd-color-light-highlight) !important}.sd-bg-muted{background-color:var(--sd-color-muted) !important}.sd-bg-text-muted{color:var(--sd-color-muted-text) !important}button.sd-bg-muted:focus,button.sd-bg-muted:hover{background-color:var(--sd-color-muted-highlight) !important}a.sd-bg-muted:focus,a.sd-bg-muted:hover{background-color:var(--sd-color-muted-highlight) !important}.sd-bg-dark{background-color:var(--sd-color-dark) !important}.sd-bg-text-dark{color:var(--sd-color-dark-text) 
!important}button.sd-bg-dark:focus,button.sd-bg-dark:hover{background-color:var(--sd-color-dark-highlight) !important}a.sd-bg-dark:focus,a.sd-bg-dark:hover{background-color:var(--sd-color-dark-highlight) !important}.sd-bg-black{background-color:var(--sd-color-black) !important}.sd-bg-text-black{color:var(--sd-color-black-text) !important}button.sd-bg-black:focus,button.sd-bg-black:hover{background-color:var(--sd-color-black-highlight) !important}a.sd-bg-black:focus,a.sd-bg-black:hover{background-color:var(--sd-color-black-highlight) !important}.sd-bg-white{background-color:var(--sd-color-white) !important}.sd-bg-text-white{color:var(--sd-color-white-text) !important}button.sd-bg-white:focus,button.sd-bg-white:hover{background-color:var(--sd-color-white-highlight) !important}a.sd-bg-white:focus,a.sd-bg-white:hover{background-color:var(--sd-color-white-highlight) !important}.sd-text-primary,.sd-text-primary>p{color:var(--sd-color-primary) !important}a.sd-text-primary:focus,a.sd-text-primary:hover{color:var(--sd-color-primary-highlight) !important}.sd-text-secondary,.sd-text-secondary>p{color:var(--sd-color-secondary) !important}a.sd-text-secondary:focus,a.sd-text-secondary:hover{color:var(--sd-color-secondary-highlight) !important}.sd-text-success,.sd-text-success>p{color:var(--sd-color-success) !important}a.sd-text-success:focus,a.sd-text-success:hover{color:var(--sd-color-success-highlight) !important}.sd-text-info,.sd-text-info>p{color:var(--sd-color-info) !important}a.sd-text-info:focus,a.sd-text-info:hover{color:var(--sd-color-info-highlight) !important}.sd-text-warning,.sd-text-warning>p{color:var(--sd-color-warning) !important}a.sd-text-warning:focus,a.sd-text-warning:hover{color:var(--sd-color-warning-highlight) !important}.sd-text-danger,.sd-text-danger>p{color:var(--sd-color-danger) !important}a.sd-text-danger:focus,a.sd-text-danger:hover{color:var(--sd-color-danger-highlight) !important}.sd-text-light,.sd-text-light>p{color:var(--sd-color-light) !important}a.sd-text-light:focus,a.sd-text-light:hover{color:var(--sd-color-light-highlight) !important}.sd-text-muted,.sd-text-muted>p{color:var(--sd-color-muted) !important}a.sd-text-muted:focus,a.sd-text-muted:hover{color:var(--sd-color-muted-highlight) !important}.sd-text-dark,.sd-text-dark>p{color:var(--sd-color-dark) !important}a.sd-text-dark:focus,a.sd-text-dark:hover{color:var(--sd-color-dark-highlight) !important}.sd-text-black,.sd-text-black>p{color:var(--sd-color-black) !important}a.sd-text-black:focus,a.sd-text-black:hover{color:var(--sd-color-black-highlight) !important}.sd-text-white,.sd-text-white>p{color:var(--sd-color-white) !important}a.sd-text-white:focus,a.sd-text-white:hover{color:var(--sd-color-white-highlight) !important}.sd-outline-primary{border-color:var(--sd-color-primary) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-primary:focus,a.sd-outline-primary:hover{border-color:var(--sd-color-primary-highlight) !important}.sd-outline-secondary{border-color:var(--sd-color-secondary) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-secondary:focus,a.sd-outline-secondary:hover{border-color:var(--sd-color-secondary-highlight) !important}.sd-outline-success{border-color:var(--sd-color-success) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-success:focus,a.sd-outline-success:hover{border-color:var(--sd-color-success-highlight) !important}.sd-outline-info{border-color:var(--sd-color-info) !important;border-style:solid 
!important;border-width:1px !important}a.sd-outline-info:focus,a.sd-outline-info:hover{border-color:var(--sd-color-info-highlight) !important}.sd-outline-warning{border-color:var(--sd-color-warning) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-warning:focus,a.sd-outline-warning:hover{border-color:var(--sd-color-warning-highlight) !important}.sd-outline-danger{border-color:var(--sd-color-danger) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-danger:focus,a.sd-outline-danger:hover{border-color:var(--sd-color-danger-highlight) !important}.sd-outline-light{border-color:var(--sd-color-light) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-light:focus,a.sd-outline-light:hover{border-color:var(--sd-color-light-highlight) !important}.sd-outline-muted{border-color:var(--sd-color-muted) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-muted:focus,a.sd-outline-muted:hover{border-color:var(--sd-color-muted-highlight) !important}.sd-outline-dark{border-color:var(--sd-color-dark) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-dark:focus,a.sd-outline-dark:hover{border-color:var(--sd-color-dark-highlight) !important}.sd-outline-black{border-color:var(--sd-color-black) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-black:focus,a.sd-outline-black:hover{border-color:var(--sd-color-black-highlight) !important}.sd-outline-white{border-color:var(--sd-color-white) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-white:focus,a.sd-outline-white:hover{border-color:var(--sd-color-white-highlight) !important}.sd-bg-transparent{background-color:transparent !important}.sd-outline-transparent{border-color:transparent !important}.sd-text-transparent{color:transparent !important}.sd-p-0{padding:0 !important}.sd-pt-0,.sd-py-0{padding-top:0 !important}.sd-pr-0,.sd-px-0{padding-right:0 !important}.sd-pb-0,.sd-py-0{padding-bottom:0 !important}.sd-pl-0,.sd-px-0{padding-left:0 !important}.sd-p-1{padding:.25rem !important}.sd-pt-1,.sd-py-1{padding-top:.25rem !important}.sd-pr-1,.sd-px-1{padding-right:.25rem !important}.sd-pb-1,.sd-py-1{padding-bottom:.25rem !important}.sd-pl-1,.sd-px-1{padding-left:.25rem !important}.sd-p-2{padding:.5rem !important}.sd-pt-2,.sd-py-2{padding-top:.5rem !important}.sd-pr-2,.sd-px-2{padding-right:.5rem !important}.sd-pb-2,.sd-py-2{padding-bottom:.5rem !important}.sd-pl-2,.sd-px-2{padding-left:.5rem !important}.sd-p-3{padding:1rem !important}.sd-pt-3,.sd-py-3{padding-top:1rem !important}.sd-pr-3,.sd-px-3{padding-right:1rem !important}.sd-pb-3,.sd-py-3{padding-bottom:1rem !important}.sd-pl-3,.sd-px-3{padding-left:1rem !important}.sd-p-4{padding:1.5rem !important}.sd-pt-4,.sd-py-4{padding-top:1.5rem !important}.sd-pr-4,.sd-px-4{padding-right:1.5rem !important}.sd-pb-4,.sd-py-4{padding-bottom:1.5rem !important}.sd-pl-4,.sd-px-4{padding-left:1.5rem !important}.sd-p-5{padding:3rem !important}.sd-pt-5,.sd-py-5{padding-top:3rem !important}.sd-pr-5,.sd-px-5{padding-right:3rem !important}.sd-pb-5,.sd-py-5{padding-bottom:3rem !important}.sd-pl-5,.sd-px-5{padding-left:3rem !important}.sd-m-auto{margin:auto !important}.sd-mt-auto,.sd-my-auto{margin-top:auto !important}.sd-mr-auto,.sd-mx-auto{margin-right:auto !important}.sd-mb-auto,.sd-my-auto{margin-bottom:auto !important}.sd-ml-auto,.sd-mx-auto{margin-left:auto !important}.sd-m-0{margin:0 
!important}.sd-mt-0,.sd-my-0{margin-top:0 !important}.sd-mr-0,.sd-mx-0{margin-right:0 !important}.sd-mb-0,.sd-my-0{margin-bottom:0 !important}.sd-ml-0,.sd-mx-0{margin-left:0 !important}.sd-m-1{margin:.25rem !important}.sd-mt-1,.sd-my-1{margin-top:.25rem !important}.sd-mr-1,.sd-mx-1{margin-right:.25rem !important}.sd-mb-1,.sd-my-1{margin-bottom:.25rem !important}.sd-ml-1,.sd-mx-1{margin-left:.25rem !important}.sd-m-2{margin:.5rem !important}.sd-mt-2,.sd-my-2{margin-top:.5rem !important}.sd-mr-2,.sd-mx-2{margin-right:.5rem !important}.sd-mb-2,.sd-my-2{margin-bottom:.5rem !important}.sd-ml-2,.sd-mx-2{margin-left:.5rem !important}.sd-m-3{margin:1rem !important}.sd-mt-3,.sd-my-3{margin-top:1rem !important}.sd-mr-3,.sd-mx-3{margin-right:1rem !important}.sd-mb-3,.sd-my-3{margin-bottom:1rem !important}.sd-ml-3,.sd-mx-3{margin-left:1rem !important}.sd-m-4{margin:1.5rem !important}.sd-mt-4,.sd-my-4{margin-top:1.5rem !important}.sd-mr-4,.sd-mx-4{margin-right:1.5rem !important}.sd-mb-4,.sd-my-4{margin-bottom:1.5rem !important}.sd-ml-4,.sd-mx-4{margin-left:1.5rem !important}.sd-m-5{margin:3rem !important}.sd-mt-5,.sd-my-5{margin-top:3rem !important}.sd-mr-5,.sd-mx-5{margin-right:3rem !important}.sd-mb-5,.sd-my-5{margin-bottom:3rem !important}.sd-ml-5,.sd-mx-5{margin-left:3rem !important}.sd-w-25{width:25% !important}.sd-w-50{width:50% !important}.sd-w-75{width:75% !important}.sd-w-100{width:100% !important}.sd-w-auto{width:auto !important}.sd-h-25{height:25% !important}.sd-h-50{height:50% !important}.sd-h-75{height:75% !important}.sd-h-100{height:100% !important}.sd-h-auto{height:auto !important}.sd-d-none{display:none !important}.sd-d-inline{display:inline !important}.sd-d-inline-block{display:inline-block !important}.sd-d-block{display:block !important}.sd-d-grid{display:grid !important}.sd-d-flex-row{display:-ms-flexbox !important;display:flex !important;flex-direction:row !important}.sd-d-flex-column{display:-ms-flexbox !important;display:flex !important;flex-direction:column !important}.sd-d-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}@media(min-width: 576px){.sd-d-sm-none{display:none !important}.sd-d-sm-inline{display:inline !important}.sd-d-sm-inline-block{display:inline-block !important}.sd-d-sm-block{display:block !important}.sd-d-sm-grid{display:grid !important}.sd-d-sm-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-sm-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 768px){.sd-d-md-none{display:none !important}.sd-d-md-inline{display:inline !important}.sd-d-md-inline-block{display:inline-block !important}.sd-d-md-block{display:block !important}.sd-d-md-grid{display:grid !important}.sd-d-md-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-md-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 992px){.sd-d-lg-none{display:none !important}.sd-d-lg-inline{display:inline !important}.sd-d-lg-inline-block{display:inline-block !important}.sd-d-lg-block{display:block !important}.sd-d-lg-grid{display:grid !important}.sd-d-lg-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-lg-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 1200px){.sd-d-xl-none{display:none !important}.sd-d-xl-inline{display:inline !important}.sd-d-xl-inline-block{display:inline-block !important}.sd-d-xl-block{display:block !important}.sd-d-xl-grid{display:grid 
!important}.sd-d-xl-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-xl-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}.sd-align-major-start{justify-content:flex-start !important}.sd-align-major-end{justify-content:flex-end !important}.sd-align-major-center{justify-content:center !important}.sd-align-major-justify{justify-content:space-between !important}.sd-align-major-spaced{justify-content:space-evenly !important}.sd-align-minor-start{align-items:flex-start !important}.sd-align-minor-end{align-items:flex-end !important}.sd-align-minor-center{align-items:center !important}.sd-align-minor-stretch{align-items:stretch !important}.sd-text-justify{text-align:justify !important}.sd-text-left{text-align:left !important}.sd-text-right{text-align:right !important}.sd-text-center{text-align:center !important}.sd-font-weight-light{font-weight:300 !important}.sd-font-weight-lighter{font-weight:lighter !important}.sd-font-weight-normal{font-weight:400 !important}.sd-font-weight-bold{font-weight:700 !important}.sd-font-weight-bolder{font-weight:bolder !important}.sd-font-italic{font-style:italic !important}.sd-text-decoration-none{text-decoration:none !important}.sd-text-lowercase{text-transform:lowercase !important}.sd-text-uppercase{text-transform:uppercase !important}.sd-text-capitalize{text-transform:capitalize !important}.sd-text-wrap{white-space:normal !important}.sd-text-nowrap{white-space:nowrap !important}.sd-text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.sd-fs-1,.sd-fs-1>p{font-size:calc(1.375rem + 1.5vw) !important;line-height:unset !important}.sd-fs-2,.sd-fs-2>p{font-size:calc(1.325rem + 0.9vw) !important;line-height:unset !important}.sd-fs-3,.sd-fs-3>p{font-size:calc(1.3rem + 0.6vw) !important;line-height:unset !important}.sd-fs-4,.sd-fs-4>p{font-size:calc(1.275rem + 0.3vw) !important;line-height:unset !important}.sd-fs-5,.sd-fs-5>p{font-size:1.25rem !important;line-height:unset !important}.sd-fs-6,.sd-fs-6>p{font-size:1rem !important;line-height:unset !important}.sd-border-0{border:0 solid !important}.sd-border-top-0{border-top:0 solid !important}.sd-border-bottom-0{border-bottom:0 solid !important}.sd-border-right-0{border-right:0 solid !important}.sd-border-left-0{border-left:0 solid !important}.sd-border-1{border:1px solid !important}.sd-border-top-1{border-top:1px solid !important}.sd-border-bottom-1{border-bottom:1px solid !important}.sd-border-right-1{border-right:1px solid !important}.sd-border-left-1{border-left:1px solid !important}.sd-border-2{border:2px solid !important}.sd-border-top-2{border-top:2px solid !important}.sd-border-bottom-2{border-bottom:2px solid !important}.sd-border-right-2{border-right:2px solid !important}.sd-border-left-2{border-left:2px solid !important}.sd-border-3{border:3px solid !important}.sd-border-top-3{border-top:3px solid !important}.sd-border-bottom-3{border-bottom:3px solid !important}.sd-border-right-3{border-right:3px solid !important}.sd-border-left-3{border-left:3px solid !important}.sd-border-4{border:4px solid !important}.sd-border-top-4{border-top:4px solid !important}.sd-border-bottom-4{border-bottom:4px solid !important}.sd-border-right-4{border-right:4px solid !important}.sd-border-left-4{border-left:4px solid !important}.sd-border-5{border:5px solid !important}.sd-border-top-5{border-top:5px solid !important}.sd-border-bottom-5{border-bottom:5px solid !important}.sd-border-right-5{border-right:5px solid !important}.sd-border-left-5{border-left:5px solid 
!important}.sd-rounded-0{border-radius:0 !important}.sd-rounded-1{border-radius:.2rem !important}.sd-rounded-2{border-radius:.3rem !important}.sd-rounded-3{border-radius:.5rem !important}.sd-rounded-pill{border-radius:50rem !important}.sd-rounded-circle{border-radius:50% !important}.shadow-none{box-shadow:none !important}.sd-shadow-sm{box-shadow:0 .125rem .25rem var(--sd-color-shadow) !important}.sd-shadow-md{box-shadow:0 .5rem 1rem var(--sd-color-shadow) !important}.sd-shadow-lg{box-shadow:0 1rem 3rem var(--sd-color-shadow) !important}@keyframes sd-slide-from-left{0%{transform:translateX(-100%)}100%{transform:translateX(0)}}@keyframes sd-slide-from-right{0%{transform:translateX(200%)}100%{transform:translateX(0)}}@keyframes sd-grow100{0%{transform:scale(0);opacity:.5}100%{transform:scale(1);opacity:1}}@keyframes sd-grow50{0%{transform:scale(0.5);opacity:.5}100%{transform:scale(1);opacity:1}}@keyframes sd-grow50-rot20{0%{transform:scale(0.5) rotateZ(-20deg);opacity:.5}75%{transform:scale(1) rotateZ(5deg);opacity:1}95%{transform:scale(1) rotateZ(-1deg);opacity:1}100%{transform:scale(1) rotateZ(0);opacity:1}}.sd-animate-slide-from-left{animation:1s ease-out 0s 1 normal none running sd-slide-from-left}.sd-animate-slide-from-right{animation:1s ease-out 0s 1 normal none running sd-slide-from-right}.sd-animate-grow100{animation:1s ease-out 0s 1 normal none running sd-grow100}.sd-animate-grow50{animation:1s ease-out 0s 1 normal none running sd-grow50}.sd-animate-grow50-rot20{animation:1s ease-out 0s 1 normal none running sd-grow50-rot20}.sd-badge{display:inline-block;padding:.35em .65em;font-size:.75em;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.sd-badge:empty{display:none}a.sd-badge{text-decoration:none}.sd-btn .sd-badge{position:relative;top:-1px}.sd-btn{background-color:transparent;border:1px solid transparent;border-radius:.25rem;cursor:pointer;display:inline-block;font-weight:400;font-size:1rem;line-height:1.5;padding:.375rem .75rem;text-align:center;text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;vertical-align:middle;user-select:none;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none}.sd-btn:hover{text-decoration:none}@media(prefers-reduced-motion: reduce){.sd-btn{transition:none}}.sd-btn-primary,.sd-btn-outline-primary:hover,.sd-btn-outline-primary:focus{color:var(--sd-color-primary-text) !important;background-color:var(--sd-color-primary) !important;border-color:var(--sd-color-primary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-primary:hover,.sd-btn-primary:focus{color:var(--sd-color-primary-text) !important;background-color:var(--sd-color-primary-highlight) !important;border-color:var(--sd-color-primary-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-primary{color:var(--sd-color-primary) !important;border-color:var(--sd-color-primary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-secondary,.sd-btn-outline-secondary:hover,.sd-btn-outline-secondary:focus{color:var(--sd-color-secondary-text) !important;background-color:var(--sd-color-secondary) !important;border-color:var(--sd-color-secondary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-secondary:hover,.sd-btn-secondary:focus{color:var(--sd-color-secondary-text) 
!important;background-color:var(--sd-color-secondary-highlight) !important;border-color:var(--sd-color-secondary-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-secondary{color:var(--sd-color-secondary) !important;border-color:var(--sd-color-secondary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-success,.sd-btn-outline-success:hover,.sd-btn-outline-success:focus{color:var(--sd-color-success-text) !important;background-color:var(--sd-color-success) !important;border-color:var(--sd-color-success) !important;border-width:1px !important;border-style:solid !important}.sd-btn-success:hover,.sd-btn-success:focus{color:var(--sd-color-success-text) !important;background-color:var(--sd-color-success-highlight) !important;border-color:var(--sd-color-success-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-success{color:var(--sd-color-success) !important;border-color:var(--sd-color-success) !important;border-width:1px !important;border-style:solid !important}.sd-btn-info,.sd-btn-outline-info:hover,.sd-btn-outline-info:focus{color:var(--sd-color-info-text) !important;background-color:var(--sd-color-info) !important;border-color:var(--sd-color-info) !important;border-width:1px !important;border-style:solid !important}.sd-btn-info:hover,.sd-btn-info:focus{color:var(--sd-color-info-text) !important;background-color:var(--sd-color-info-highlight) !important;border-color:var(--sd-color-info-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-info{color:var(--sd-color-info) !important;border-color:var(--sd-color-info) !important;border-width:1px !important;border-style:solid !important}.sd-btn-warning,.sd-btn-outline-warning:hover,.sd-btn-outline-warning:focus{color:var(--sd-color-warning-text) !important;background-color:var(--sd-color-warning) !important;border-color:var(--sd-color-warning) !important;border-width:1px !important;border-style:solid !important}.sd-btn-warning:hover,.sd-btn-warning:focus{color:var(--sd-color-warning-text) !important;background-color:var(--sd-color-warning-highlight) !important;border-color:var(--sd-color-warning-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-warning{color:var(--sd-color-warning) !important;border-color:var(--sd-color-warning) !important;border-width:1px !important;border-style:solid !important}.sd-btn-danger,.sd-btn-outline-danger:hover,.sd-btn-outline-danger:focus{color:var(--sd-color-danger-text) !important;background-color:var(--sd-color-danger) !important;border-color:var(--sd-color-danger) !important;border-width:1px !important;border-style:solid !important}.sd-btn-danger:hover,.sd-btn-danger:focus{color:var(--sd-color-danger-text) !important;background-color:var(--sd-color-danger-highlight) !important;border-color:var(--sd-color-danger-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-danger{color:var(--sd-color-danger) !important;border-color:var(--sd-color-danger) !important;border-width:1px !important;border-style:solid !important}.sd-btn-light,.sd-btn-outline-light:hover,.sd-btn-outline-light:focus{color:var(--sd-color-light-text) !important;background-color:var(--sd-color-light) !important;border-color:var(--sd-color-light) !important;border-width:1px !important;border-style:solid !important}.sd-btn-light:hover,.sd-btn-light:focus{color:var(--sd-color-light-text) 
!important;background-color:var(--sd-color-light-highlight) !important;border-color:var(--sd-color-light-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-light{color:var(--sd-color-light) !important;border-color:var(--sd-color-light) !important;border-width:1px !important;border-style:solid !important}.sd-btn-muted,.sd-btn-outline-muted:hover,.sd-btn-outline-muted:focus{color:var(--sd-color-muted-text) !important;background-color:var(--sd-color-muted) !important;border-color:var(--sd-color-muted) !important;border-width:1px !important;border-style:solid !important}.sd-btn-muted:hover,.sd-btn-muted:focus{color:var(--sd-color-muted-text) !important;background-color:var(--sd-color-muted-highlight) !important;border-color:var(--sd-color-muted-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-muted{color:var(--sd-color-muted) !important;border-color:var(--sd-color-muted) !important;border-width:1px !important;border-style:solid !important}.sd-btn-dark,.sd-btn-outline-dark:hover,.sd-btn-outline-dark:focus{color:var(--sd-color-dark-text) !important;background-color:var(--sd-color-dark) !important;border-color:var(--sd-color-dark) !important;border-width:1px !important;border-style:solid !important}.sd-btn-dark:hover,.sd-btn-dark:focus{color:var(--sd-color-dark-text) !important;background-color:var(--sd-color-dark-highlight) !important;border-color:var(--sd-color-dark-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-dark{color:var(--sd-color-dark) !important;border-color:var(--sd-color-dark) !important;border-width:1px !important;border-style:solid !important}.sd-btn-black,.sd-btn-outline-black:hover,.sd-btn-outline-black:focus{color:var(--sd-color-black-text) !important;background-color:var(--sd-color-black) !important;border-color:var(--sd-color-black) !important;border-width:1px !important;border-style:solid !important}.sd-btn-black:hover,.sd-btn-black:focus{color:var(--sd-color-black-text) !important;background-color:var(--sd-color-black-highlight) !important;border-color:var(--sd-color-black-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-black{color:var(--sd-color-black) !important;border-color:var(--sd-color-black) !important;border-width:1px !important;border-style:solid !important}.sd-btn-white,.sd-btn-outline-white:hover,.sd-btn-outline-white:focus{color:var(--sd-color-white-text) !important;background-color:var(--sd-color-white) !important;border-color:var(--sd-color-white) !important;border-width:1px !important;border-style:solid !important}.sd-btn-white:hover,.sd-btn-white:focus{color:var(--sd-color-white-text) !important;background-color:var(--sd-color-white-highlight) !important;border-color:var(--sd-color-white-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-white{color:var(--sd-color-white) !important;border-color:var(--sd-color-white) !important;border-width:1px !important;border-style:solid 
!important}.sd-stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.sd-hide-link-text{font-size:0}.sd-octicon,.sd-material-icon{display:inline-block;fill:currentColor;vertical-align:middle}.sd-avatar-xs{border-radius:50%;object-fit:cover;object-position:center;width:1rem;height:1rem}.sd-avatar-sm{border-radius:50%;object-fit:cover;object-position:center;width:3rem;height:3rem}.sd-avatar-md{border-radius:50%;object-fit:cover;object-position:center;width:5rem;height:5rem}.sd-avatar-lg{border-radius:50%;object-fit:cover;object-position:center;width:7rem;height:7rem}.sd-avatar-xl{border-radius:50%;object-fit:cover;object-position:center;width:10rem;height:10rem}.sd-avatar-inherit{border-radius:50%;object-fit:cover;object-position:center;width:inherit;height:inherit}.sd-avatar-initial{border-radius:50%;object-fit:cover;object-position:center;width:initial;height:initial}.sd-card{background-clip:border-box;background-color:var(--sd-color-card-background);border:1px solid var(--sd-color-card-border);border-radius:.25rem;color:var(--sd-color-card-text);display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;min-width:0;position:relative;word-wrap:break-word}.sd-card>hr{margin-left:0;margin-right:0}.sd-card-hover:hover{border-color:var(--sd-color-card-border-hover);transform:scale(1.01)}.sd-card-body{-ms-flex:1 1 auto;flex:1 1 auto;padding:1rem 1rem}.sd-card-title{margin-bottom:.5rem}.sd-card-subtitle{margin-top:-0.25rem;margin-bottom:0}.sd-card-text:last-child{margin-bottom:0}.sd-card-link:hover{text-decoration:none}.sd-card-link+.card-link{margin-left:1rem}.sd-card-header{padding:.5rem 1rem;margin-bottom:0;background-color:var(--sd-color-card-header);border-bottom:1px solid var(--sd-color-card-border)}.sd-card-header:first-child{border-radius:calc(0.25rem - 1px) calc(0.25rem - 1px) 0 0}.sd-card-footer{padding:.5rem 1rem;background-color:var(--sd-color-card-footer);border-top:1px solid var(--sd-color-card-border)}.sd-card-footer:last-child{border-radius:0 0 calc(0.25rem - 1px) calc(0.25rem - 1px)}.sd-card-header-tabs{margin-right:-0.5rem;margin-bottom:-0.5rem;margin-left:-0.5rem;border-bottom:0}.sd-card-header-pills{margin-right:-0.5rem;margin-left:-0.5rem}.sd-card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem;border-radius:calc(0.25rem - 1px)}.sd-card-img,.sd-card-img-bottom,.sd-card-img-top{width:100%}.sd-card-img,.sd-card-img-top{border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.sd-card-img,.sd-card-img-bottom{border-bottom-left-radius:calc(0.25rem - 1px);border-bottom-right-radius:calc(0.25rem - 1px)}.sd-cards-carousel{width:100%;display:flex;flex-wrap:nowrap;-ms-flex-direction:row;flex-direction:row;overflow-x:hidden;scroll-snap-type:x 
mandatory}.sd-cards-carousel.sd-show-scrollbar{overflow-x:auto}.sd-cards-carousel:hover,.sd-cards-carousel:focus{overflow-x:auto}.sd-cards-carousel>.sd-card{flex-shrink:0;scroll-snap-align:start}.sd-cards-carousel>.sd-card:not(:last-child){margin-right:3px}.sd-card-cols-1>.sd-card{width:90%}.sd-card-cols-2>.sd-card{width:45%}.sd-card-cols-3>.sd-card{width:30%}.sd-card-cols-4>.sd-card{width:22.5%}.sd-card-cols-5>.sd-card{width:18%}.sd-card-cols-6>.sd-card{width:15%}.sd-card-cols-7>.sd-card{width:12.8571428571%}.sd-card-cols-8>.sd-card{width:11.25%}.sd-card-cols-9>.sd-card{width:10%}.sd-card-cols-10>.sd-card{width:9%}.sd-card-cols-11>.sd-card{width:8.1818181818%}.sd-card-cols-12>.sd-card{width:7.5%}.sd-container,.sd-container-fluid,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container-xl{margin-left:auto;margin-right:auto;padding-left:var(--sd-gutter-x, 0.75rem);padding-right:var(--sd-gutter-x, 0.75rem);width:100%}@media(min-width: 576px){.sd-container-sm,.sd-container{max-width:540px}}@media(min-width: 768px){.sd-container-md,.sd-container-sm,.sd-container{max-width:720px}}@media(min-width: 992px){.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container{max-width:960px}}@media(min-width: 1200px){.sd-container-xl,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container{max-width:1140px}}.sd-row{--sd-gutter-x: 1.5rem;--sd-gutter-y: 0;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-top:calc(var(--sd-gutter-y) * -1);margin-right:calc(var(--sd-gutter-x) * -0.5);margin-left:calc(var(--sd-gutter-x) * -0.5)}.sd-row>*{box-sizing:border-box;flex-shrink:0;width:100%;max-width:100%;padding-right:calc(var(--sd-gutter-x) * 0.5);padding-left:calc(var(--sd-gutter-x) * 0.5);margin-top:var(--sd-gutter-y)}.sd-col{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-auto>*{flex:0 0 auto;width:auto}.sd-row-cols-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}@media(min-width: 576px){.sd-col-sm{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-sm-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-sm-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-sm-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-sm-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-sm-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-sm-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-sm-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-sm-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-sm-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-sm-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-sm-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-sm-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-sm-12>*{flex:0 0 auto;-ms-flex:0 0 
auto;width:8.3333333333%}}@media(min-width: 768px){.sd-col-md{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-md-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-md-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-md-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-md-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-md-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-md-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-md-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-md-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-md-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-md-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-md-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-md-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-md-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}@media(min-width: 992px){.sd-col-lg{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-lg-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-lg-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-lg-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-lg-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-lg-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-lg-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-lg-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-lg-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-lg-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-lg-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-lg-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-lg-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-lg-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}@media(min-width: 1200px){.sd-col-xl{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-xl-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-xl-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-xl-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-xl-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-xl-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-xl-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-xl-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-xl-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-xl-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-xl-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-xl-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-xl-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-xl-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}.sd-col-auto{flex:0 0 auto;-ms-flex:0 0 auto;width:auto}.sd-col-1{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}.sd-col-2{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-col-3{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-col-4{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-col-5{flex:0 0 auto;-ms-flex:0 0 auto;width:41.6666666667%}.sd-col-6{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-col-7{flex:0 0 auto;-ms-flex:0 0 auto;width:58.3333333333%}.sd-col-8{flex:0 0 auto;-ms-flex:0 0 auto;width:66.6666666667%}.sd-col-9{flex:0 0 auto;-ms-flex:0 0 auto;width:75%}.sd-col-10{flex:0 0 auto;-ms-flex:0 0 auto;width:83.3333333333%}.sd-col-11{flex:0 0 auto;-ms-flex:0 0 
auto;width:91.6666666667%}.sd-col-12{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-g-0,.sd-gy-0{--sd-gutter-y: 0}.sd-g-0,.sd-gx-0{--sd-gutter-x: 0}.sd-g-1,.sd-gy-1{--sd-gutter-y: 0.25rem}.sd-g-1,.sd-gx-1{--sd-gutter-x: 0.25rem}.sd-g-2,.sd-gy-2{--sd-gutter-y: 0.5rem}.sd-g-2,.sd-gx-2{--sd-gutter-x: 0.5rem}.sd-g-3,.sd-gy-3{--sd-gutter-y: 1rem}.sd-g-3,.sd-gx-3{--sd-gutter-x: 1rem}.sd-g-4,.sd-gy-4{--sd-gutter-y: 1.5rem}.sd-g-4,.sd-gx-4{--sd-gutter-x: 1.5rem}.sd-g-5,.sd-gy-5{--sd-gutter-y: 3rem}.sd-g-5,.sd-gx-5{--sd-gutter-x: 3rem}@media(min-width: 576px){.sd-col-sm-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-sm-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-sm-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-sm-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-sm-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-sm-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-sm-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-sm-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-sm-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-sm-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-sm-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-sm-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-sm-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-sm-0,.sd-gy-sm-0{--sd-gutter-y: 0}.sd-g-sm-0,.sd-gx-sm-0{--sd-gutter-x: 0}.sd-g-sm-1,.sd-gy-sm-1{--sd-gutter-y: 0.25rem}.sd-g-sm-1,.sd-gx-sm-1{--sd-gutter-x: 0.25rem}.sd-g-sm-2,.sd-gy-sm-2{--sd-gutter-y: 0.5rem}.sd-g-sm-2,.sd-gx-sm-2{--sd-gutter-x: 0.5rem}.sd-g-sm-3,.sd-gy-sm-3{--sd-gutter-y: 1rem}.sd-g-sm-3,.sd-gx-sm-3{--sd-gutter-x: 1rem}.sd-g-sm-4,.sd-gy-sm-4{--sd-gutter-y: 1.5rem}.sd-g-sm-4,.sd-gx-sm-4{--sd-gutter-x: 1.5rem}.sd-g-sm-5,.sd-gy-sm-5{--sd-gutter-y: 3rem}.sd-g-sm-5,.sd-gx-sm-5{--sd-gutter-x: 3rem}}@media(min-width: 768px){.sd-col-md-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-md-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-md-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-md-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-md-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-md-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-md-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-md-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-md-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-md-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-md-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-md-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-md-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-md-0,.sd-gy-md-0{--sd-gutter-y: 0}.sd-g-md-0,.sd-gx-md-0{--sd-gutter-x: 0}.sd-g-md-1,.sd-gy-md-1{--sd-gutter-y: 0.25rem}.sd-g-md-1,.sd-gx-md-1{--sd-gutter-x: 0.25rem}.sd-g-md-2,.sd-gy-md-2{--sd-gutter-y: 0.5rem}.sd-g-md-2,.sd-gx-md-2{--sd-gutter-x: 0.5rem}.sd-g-md-3,.sd-gy-md-3{--sd-gutter-y: 1rem}.sd-g-md-3,.sd-gx-md-3{--sd-gutter-x: 1rem}.sd-g-md-4,.sd-gy-md-4{--sd-gutter-y: 1.5rem}.sd-g-md-4,.sd-gx-md-4{--sd-gutter-x: 1.5rem}.sd-g-md-5,.sd-gy-md-5{--sd-gutter-y: 3rem}.sd-g-md-5,.sd-gx-md-5{--sd-gutter-x: 3rem}}@media(min-width: 992px){.sd-col-lg-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-lg-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-lg-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-lg-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-lg-4{-ms-flex:0 0 
auto;flex:0 0 auto;width:33.3333333333%}.sd-col-lg-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-lg-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-lg-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-lg-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-lg-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-lg-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-lg-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-lg-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-lg-0,.sd-gy-lg-0{--sd-gutter-y: 0}.sd-g-lg-0,.sd-gx-lg-0{--sd-gutter-x: 0}.sd-g-lg-1,.sd-gy-lg-1{--sd-gutter-y: 0.25rem}.sd-g-lg-1,.sd-gx-lg-1{--sd-gutter-x: 0.25rem}.sd-g-lg-2,.sd-gy-lg-2{--sd-gutter-y: 0.5rem}.sd-g-lg-2,.sd-gx-lg-2{--sd-gutter-x: 0.5rem}.sd-g-lg-3,.sd-gy-lg-3{--sd-gutter-y: 1rem}.sd-g-lg-3,.sd-gx-lg-3{--sd-gutter-x: 1rem}.sd-g-lg-4,.sd-gy-lg-4{--sd-gutter-y: 1.5rem}.sd-g-lg-4,.sd-gx-lg-4{--sd-gutter-x: 1.5rem}.sd-g-lg-5,.sd-gy-lg-5{--sd-gutter-y: 3rem}.sd-g-lg-5,.sd-gx-lg-5{--sd-gutter-x: 3rem}}@media(min-width: 1200px){.sd-col-xl-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-xl-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-xl-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-xl-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-xl-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-xl-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-xl-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-xl-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-xl-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-xl-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-xl-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-xl-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-xl-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-xl-0,.sd-gy-xl-0{--sd-gutter-y: 0}.sd-g-xl-0,.sd-gx-xl-0{--sd-gutter-x: 0}.sd-g-xl-1,.sd-gy-xl-1{--sd-gutter-y: 0.25rem}.sd-g-xl-1,.sd-gx-xl-1{--sd-gutter-x: 0.25rem}.sd-g-xl-2,.sd-gy-xl-2{--sd-gutter-y: 0.5rem}.sd-g-xl-2,.sd-gx-xl-2{--sd-gutter-x: 0.5rem}.sd-g-xl-3,.sd-gy-xl-3{--sd-gutter-y: 1rem}.sd-g-xl-3,.sd-gx-xl-3{--sd-gutter-x: 1rem}.sd-g-xl-4,.sd-gy-xl-4{--sd-gutter-y: 1.5rem}.sd-g-xl-4,.sd-gx-xl-4{--sd-gutter-x: 1.5rem}.sd-g-xl-5,.sd-gy-xl-5{--sd-gutter-y: 3rem}.sd-g-xl-5,.sd-gx-xl-5{--sd-gutter-x: 3rem}}.sd-flex-row-reverse{flex-direction:row-reverse !important}details.sd-dropdown{position:relative;font-size:var(--sd-fontsize-dropdown)}details.sd-dropdown:hover{cursor:pointer}details.sd-dropdown .sd-summary-content{cursor:default}details.sd-dropdown summary.sd-summary-title{padding:.5em .6em .5em 1em;font-size:var(--sd-fontsize-dropdown-title);font-weight:var(--sd-fontweight-dropdown-title);user-select:none;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none;list-style:none;display:inline-flex;justify-content:space-between}details.sd-dropdown summary.sd-summary-title::-webkit-details-marker{display:none}details.sd-dropdown summary.sd-summary-title:focus{outline:none}details.sd-dropdown summary.sd-summary-title .sd-summary-icon{margin-right:.6em;display:inline-flex;align-items:center}details.sd-dropdown summary.sd-summary-title .sd-summary-icon svg{opacity:.8}details.sd-dropdown summary.sd-summary-title .sd-summary-text{flex-grow:1;line-height:1.5;padding-right:.5rem}details.sd-dropdown summary.sd-summary-title 
.sd-summary-state-marker{pointer-events:none;display:inline-flex;align-items:center}details.sd-dropdown summary.sd-summary-title .sd-summary-state-marker svg{opacity:.6}details.sd-dropdown summary.sd-summary-title:hover .sd-summary-state-marker svg{opacity:1;transform:scale(1.1)}details.sd-dropdown[open] summary .sd-octicon.no-title{visibility:hidden}details.sd-dropdown .sd-summary-chevron-right{transition:.25s}details.sd-dropdown[open]>.sd-summary-title .sd-summary-chevron-right{transform:rotate(90deg)}details.sd-dropdown[open]>.sd-summary-title .sd-summary-chevron-down{transform:rotate(180deg)}details.sd-dropdown:not([open]).sd-card{border:none}details.sd-dropdown:not([open])>.sd-card-header{border:1px solid var(--sd-color-card-border);border-radius:.25rem}details.sd-dropdown.sd-fade-in[open] summary~*{-moz-animation:sd-fade-in .5s ease-in-out;-webkit-animation:sd-fade-in .5s ease-in-out;animation:sd-fade-in .5s ease-in-out}details.sd-dropdown.sd-fade-in-slide-down[open] summary~*{-moz-animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out;-webkit-animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out;animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out}.sd-col>.sd-dropdown{width:100%}.sd-summary-content>.sd-tab-set:first-child{margin-top:0}@keyframes sd-fade-in{0%{opacity:0}100%{opacity:1}}@keyframes sd-slide-down{0%{transform:translate(0, -10px)}100%{transform:translate(0, 0)}}.sd-tab-set{border-radius:.125rem;display:flex;flex-wrap:wrap;margin:1em 0;position:relative}.sd-tab-set>input{opacity:0;position:absolute}.sd-tab-set>input:checked+label{border-color:var(--sd-color-tabs-underline-active);color:var(--sd-color-tabs-label-active)}.sd-tab-set>input:checked+label+.sd-tab-content{display:block}.sd-tab-set>input:not(:checked)+label:hover{color:var(--sd-color-tabs-label-hover);border-color:var(--sd-color-tabs-underline-hover)}.sd-tab-set>input:focus+label{outline-style:auto}.sd-tab-set>input:not(.focus-visible)+label{outline:none;-webkit-tap-highlight-color:transparent}.sd-tab-set>label{border-bottom:.125rem solid transparent;margin-bottom:0;color:var(--sd-color-tabs-label-inactive);border-color:var(--sd-color-tabs-underline-inactive);cursor:pointer;font-size:var(--sd-fontsize-tabs-label);font-weight:700;padding:1em 1.25em .5em;transition:color 250ms;width:auto;z-index:1}html .sd-tab-set>label:hover{color:var(--sd-color-tabs-label-active)}.sd-col>.sd-tab-set{width:100%}.sd-tab-content{box-shadow:0 -0.0625rem var(--sd-color-tabs-overline),0 .0625rem var(--sd-color-tabs-underline);display:none;order:99;padding-bottom:.75rem;padding-top:.75rem;width:100%}.sd-tab-content>:first-child{margin-top:0 !important}.sd-tab-content>:last-child{margin-bottom:0 !important}.sd-tab-content>.sd-tab-set{margin:0}.sd-sphinx-override,.sd-sphinx-override *{-moz-box-sizing:border-box;-webkit-box-sizing:border-box;box-sizing:border-box}.sd-sphinx-override p{margin-top:0}:root{--sd-color-primary: #0071bc;--sd-color-secondary: #6c757d;--sd-color-success: #28a745;--sd-color-info: #17a2b8;--sd-color-warning: #f0b37e;--sd-color-danger: #dc3545;--sd-color-light: #f8f9fa;--sd-color-muted: #6c757d;--sd-color-dark: #212529;--sd-color-black: black;--sd-color-white: white;--sd-color-primary-highlight: #0060a0;--sd-color-secondary-highlight: #5c636a;--sd-color-success-highlight: #228e3b;--sd-color-info-highlight: #148a9c;--sd-color-warning-highlight: #cc986b;--sd-color-danger-highlight: #bb2d3b;--sd-color-light-highlight: #d3d4d5;--sd-color-muted-highlight: 
#5c636a;--sd-color-dark-highlight: #1c1f23;--sd-color-black-highlight: black;--sd-color-white-highlight: #d9d9d9;--sd-color-primary-bg: rgba(0, 113, 188, 0.2);--sd-color-secondary-bg: rgba(108, 117, 125, 0.2);--sd-color-success-bg: rgba(40, 167, 69, 0.2);--sd-color-info-bg: rgba(23, 162, 184, 0.2);--sd-color-warning-bg: rgba(240, 179, 126, 0.2);--sd-color-danger-bg: rgba(220, 53, 69, 0.2);--sd-color-light-bg: rgba(248, 249, 250, 0.2);--sd-color-muted-bg: rgba(108, 117, 125, 0.2);--sd-color-dark-bg: rgba(33, 37, 41, 0.2);--sd-color-black-bg: rgba(0, 0, 0, 0.2);--sd-color-white-bg: rgba(255, 255, 255, 0.2);--sd-color-primary-text: #fff;--sd-color-secondary-text: #fff;--sd-color-success-text: #fff;--sd-color-info-text: #fff;--sd-color-warning-text: #212529;--sd-color-danger-text: #fff;--sd-color-light-text: #212529;--sd-color-muted-text: #fff;--sd-color-dark-text: #fff;--sd-color-black-text: #fff;--sd-color-white-text: #212529;--sd-color-shadow: rgba(0, 0, 0, 0.15);--sd-color-card-border: rgba(0, 0, 0, 0.125);--sd-color-card-border-hover: hsla(231, 99%, 66%, 1);--sd-color-card-background: transparent;--sd-color-card-text: inherit;--sd-color-card-header: transparent;--sd-color-card-footer: transparent;--sd-color-tabs-label-active: hsla(231, 99%, 66%, 1);--sd-color-tabs-label-hover: hsla(231, 99%, 66%, 1);--sd-color-tabs-label-inactive: hsl(0, 0%, 66%);--sd-color-tabs-underline-active: hsla(231, 99%, 66%, 1);--sd-color-tabs-underline-hover: rgba(178, 206, 245, 0.62);--sd-color-tabs-underline-inactive: transparent;--sd-color-tabs-overline: rgb(222, 222, 222);--sd-color-tabs-underline: rgb(222, 222, 222);--sd-fontsize-tabs-label: 1rem;--sd-fontsize-dropdown: inherit;--sd-fontsize-dropdown-title: 1rem;--sd-fontweight-dropdown-title: 700} diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 0000000..f316efc --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 
8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + 
+dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} 
+ +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/check-solid.svg b/_static/check-solid.svg new file mode 100644 index 0000000..92fad4b --- /dev/null +++ b/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/_static/clipboard.min.js b/_static/clipboard.min.js new file mode 100644 index 0000000..54b3c46 --- /dev/null +++ b/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/_static/copybutton.css b/_static/copybutton.css new file mode 100644 index 0000000..f1916ec --- /dev/null +++ b/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *
+ * <p class="o-tooltip--left" data-tooltip="Hey">Short</p>
+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/_static/copybutton.js b/_static/copybutton.js new file mode 100644 index 0000000..2ea7ff3 --- /dev/null +++ b/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and 
icon change, so that we can hide the icon before changing back. +var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/_static/copybutton_funcs.js b/_static/copybutton_funcs.js new file mode 100644 index 0000000..dbe1aaa --- /dev/null +++ b/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/_static/custom.css b/_static/custom.css new file mode 100644 index 0000000..1c9a321 --- /dev/null +++ b/_static/custom.css @@ -0,0 +1,12 @@ +div.main { + text-align: justify; +} + +div.tags { + text-align: left; +} + +.icon-container { + display: flex; + justify-content: space-between; +} \ No newline at end of file diff --git a/_static/debug.css b/_static/debug.css new file mode 100644 index 0000000..74d4aec --- /dev/null +++ b/_static/debug.css @@ -0,0 +1,69 @@ +/* + This CSS file should be overridden by the theme authors. It's + meant for debugging and developing the skeleton that this theme provides. +*/ +body { + font-family: -apple-system, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, + "Apple Color Emoji", "Segoe UI Emoji"; + background: lavender; +} +.sb-announcement { + background: rgb(131, 131, 131); +} +.sb-announcement__inner { + background: black; + color: white; +} +.sb-header { + background: lightskyblue; +} +.sb-header__inner { + background: royalblue; + color: white; +} +.sb-header-secondary { + background: lightcyan; +} +.sb-header-secondary__inner { + background: cornflowerblue; + color: white; +} +.sb-sidebar-primary { + background: lightgreen; +} +.sb-main { + background: blanchedalmond; +} +.sb-main__inner { + background: antiquewhite; +} +.sb-header-article { + background: lightsteelblue; +} +.sb-article-container { + background: snow; +} +.sb-article-main { + background: white; +} +.sb-footer-article { + background: lightpink; +} +.sb-sidebar-secondary { + background: lightgoldenrodyellow; +} +.sb-footer-content { + background: plum; +} +.sb-footer-content__inner { + background: palevioletred; +} +.sb-footer { + background: pink; +} +.sb-footer__inner { + background: salmon; +} +.sb-article { + background: white; +} diff --git a/_static/design-tabs.js b/_static/design-tabs.js new file mode 100644 index 0000000..b25bd6a --- /dev/null +++ b/_static/design-tabs.js @@ -0,0 +1,101 @@ +// @ts-check + +// Extra JS capability for selected tabs to be synced +// The selection is stored in local storage so that it persists across page loads. 
+ +/** + * @type {Record} + */ +let sd_id_to_elements = {}; +const storageKeyPrefix = "sphinx-design-tab-id-"; + +/** + * Create a key for a tab element. + * @param {HTMLElement} el - The tab element. + * @returns {[string, string, string] | null} - The key. + * + */ +function create_key(el) { + let syncId = el.getAttribute("data-sync-id"); + let syncGroup = el.getAttribute("data-sync-group"); + if (!syncId || !syncGroup) return null; + return [syncGroup, syncId, syncGroup + "--" + syncId]; +} + +/** + * Initialize the tab selection. + * + */ +function ready() { + // Find all tabs with sync data + + /** @type {string[]} */ + let groups = []; + + document.querySelectorAll(".sd-tab-label").forEach((label) => { + if (label instanceof HTMLElement) { + let data = create_key(label); + if (data) { + let [group, id, key] = data; + + // add click event listener + // @ts-ignore + label.onclick = onSDLabelClick; + + // store map of key to elements + if (!sd_id_to_elements[key]) { + sd_id_to_elements[key] = []; + } + sd_id_to_elements[key].push(label); + + if (groups.indexOf(group) === -1) { + groups.push(group); + // Check if a specific tab has been selected via URL parameter + const tabParam = new URLSearchParams(window.location.search).get( + group + ); + if (tabParam) { + console.log( + "sphinx-design: Selecting tab id for group '" + + group + + "' from URL parameter: " + + tabParam + ); + window.sessionStorage.setItem(storageKeyPrefix + group, tabParam); + } + } + + // Check is a specific tab has been selected previously + let previousId = window.sessionStorage.getItem( + storageKeyPrefix + group + ); + if (previousId === id) { + // console.log( + // "sphinx-design: Selecting tab from session storage: " + id + // ); + // @ts-ignore + label.previousElementSibling.checked = true; + } + } + } + }); +} + +/** + * Activate other tabs with the same sync id. + * + * @this {HTMLElement} - The element that was clicked. + */ +function onSDLabelClick() { + let data = create_key(this); + if (!data) return; + let [group, id, key] = data; + for (const label of sd_id_to_elements[key]) { + if (label === this) continue; + // @ts-ignore + label.previousElementSibling.checked = true; + } + window.sessionStorage.setItem(storageKeyPrefix + group, id); +} + +document.addEventListener("DOMContentLoaded", ready, false); diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 0000000..4d67807 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 
0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 0000000..7e4c114 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: 
'', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/ibc3.png b/_static/ibc3.png new file mode 100644 index 0000000..8569813 Binary files /dev/null and b/_static/ibc3.png differ diff --git a/_static/language_data.js b/_static/language_data.js new file mode 100644 index 0000000..367b8ed --- /dev/null +++ b/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, if available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/mark-github.svg b/_static/mark-github.svg new file mode 100644 index 0000000..3c541b0 --- /dev/null +++ b/_static/mark-github.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/nbsphinx-broken-thumbnail.svg b/_static/nbsphinx-broken-thumbnail.svg new file mode 100644 index 0000000..4919ca8 --- /dev/null +++ b/_static/nbsphinx-broken-thumbnail.svg @@ -0,0 +1,9 @@ + + + + diff --git a/_static/nbsphinx-code-cells.css b/_static/nbsphinx-code-cells.css new file mode 100644 index 0000000..a3fb27c --- /dev/null +++ b/_static/nbsphinx-code-cells.css @@ -0,0 +1,259 @@ +/* remove conflicting styling from Sphinx themes */ +div.nbinput.container div.prompt *, +div.nboutput.container div.prompt *, 
+div.nbinput.container div.input_area pre, +div.nboutput.container div.output_area pre, +div.nbinput.container div.input_area .highlight, +div.nboutput.container div.output_area .highlight { + border: none; + padding: 0; + margin: 0; + box-shadow: none; +} + +div.nbinput.container > div[class*=highlight], +div.nboutput.container > div[class*=highlight] { + margin: 0; +} + +div.nbinput.container div.prompt *, +div.nboutput.container div.prompt * { + background: none; +} + +div.nboutput.container div.output_area .highlight, +div.nboutput.container div.output_area pre { + background: unset; +} + +div.nboutput.container div.output_area div.highlight { + color: unset; /* override Pygments text color */ +} + +/* avoid gaps between output lines */ +div.nboutput.container div[class*=highlight] pre { + line-height: normal; +} + +/* input/output containers */ +div.nbinput.container, +div.nboutput.container { + display: -webkit-flex; + display: flex; + align-items: flex-start; + margin: 0; + width: 100%; +} +@media (max-width: 540px) { + div.nbinput.container, + div.nboutput.container { + flex-direction: column; + } +} + +/* input container */ +div.nbinput.container { + padding-top: 5px; +} + +/* last container */ +div.nblast.container { + padding-bottom: 5px; +} + +/* input prompt */ +div.nbinput.container div.prompt pre, +/* for sphinx_immaterial theme: */ +div.nbinput.container div.prompt pre > code { + color: #307FC1; +} + +/* output prompt */ +div.nboutput.container div.prompt pre, +/* for sphinx_immaterial theme: */ +div.nboutput.container div.prompt pre > code { + color: #BF5B3D; +} + +/* all prompts */ +div.nbinput.container div.prompt, +div.nboutput.container div.prompt { + width: 4.5ex; + padding-top: 5px; + position: relative; + user-select: none; +} + +div.nbinput.container div.prompt > div, +div.nboutput.container div.prompt > div { + position: absolute; + right: 0; + margin-right: 0.3ex; +} + +@media (max-width: 540px) { + div.nbinput.container div.prompt, + div.nboutput.container div.prompt { + width: unset; + text-align: left; + padding: 0.4em; + } + div.nboutput.container div.prompt.empty { + padding: 0; + } + + div.nbinput.container div.prompt > div, + div.nboutput.container div.prompt > div { + position: unset; + } +} + +/* disable scrollbars and line breaks on prompts */ +div.nbinput.container div.prompt pre, +div.nboutput.container div.prompt pre { + overflow: hidden; + white-space: pre; +} + +/* input/output area */ +div.nbinput.container div.input_area, +div.nboutput.container div.output_area { + -webkit-flex: 1; + flex: 1; + overflow: auto; +} +@media (max-width: 540px) { + div.nbinput.container div.input_area, + div.nboutput.container div.output_area { + width: 100%; + } +} + +/* input area */ +div.nbinput.container div.input_area { + border: 1px solid #e0e0e0; + border-radius: 2px; + /*background: #f5f5f5;*/ +} + +/* override MathJax center alignment in output cells */ +div.nboutput.container div[class*=MathJax] { + text-align: left !important; +} + +/* override sphinx.ext.imgmath center alignment in output cells */ +div.nboutput.container div.math p { + text-align: left; +} + +/* standard error */ +div.nboutput.container div.output_area.stderr { + background: #fdd; +} + +/* ANSI colors */ +.ansi-black-fg { color: #3E424D; } +.ansi-black-bg { background-color: #3E424D; } +.ansi-black-intense-fg { color: #282C36; } +.ansi-black-intense-bg { background-color: #282C36; } +.ansi-red-fg { color: #E75C58; } +.ansi-red-bg { background-color: #E75C58; } +.ansi-red-intense-fg { 
color: #B22B31; } +.ansi-red-intense-bg { background-color: #B22B31; } +.ansi-green-fg { color: #00A250; } +.ansi-green-bg { background-color: #00A250; } +.ansi-green-intense-fg { color: #007427; } +.ansi-green-intense-bg { background-color: #007427; } +.ansi-yellow-fg { color: #DDB62B; } +.ansi-yellow-bg { background-color: #DDB62B; } +.ansi-yellow-intense-fg { color: #B27D12; } +.ansi-yellow-intense-bg { background-color: #B27D12; } +.ansi-blue-fg { color: #208FFB; } +.ansi-blue-bg { background-color: #208FFB; } +.ansi-blue-intense-fg { color: #0065CA; } +.ansi-blue-intense-bg { background-color: #0065CA; } +.ansi-magenta-fg { color: #D160C4; } +.ansi-magenta-bg { background-color: #D160C4; } +.ansi-magenta-intense-fg { color: #A03196; } +.ansi-magenta-intense-bg { background-color: #A03196; } +.ansi-cyan-fg { color: #60C6C8; } +.ansi-cyan-bg { background-color: #60C6C8; } +.ansi-cyan-intense-fg { color: #258F8F; } +.ansi-cyan-intense-bg { background-color: #258F8F; } +.ansi-white-fg { color: #C5C1B4; } +.ansi-white-bg { background-color: #C5C1B4; } +.ansi-white-intense-fg { color: #A1A6B2; } +.ansi-white-intense-bg { background-color: #A1A6B2; } + +.ansi-default-inverse-fg { color: #FFFFFF; } +.ansi-default-inverse-bg { background-color: #000000; } + +.ansi-bold { font-weight: bold; } +.ansi-underline { text-decoration: underline; } + + +div.nbinput.container div.input_area div[class*=highlight] > pre, +div.nboutput.container div.output_area div[class*=highlight] > pre, +div.nboutput.container div.output_area div[class*=highlight].math, +div.nboutput.container div.output_area.rendered_html, +div.nboutput.container div.output_area > div.output_javascript, +div.nboutput.container div.output_area:not(.rendered_html) > img{ + padding: 5px; + margin: 0; +} + +/* fix copybtn overflow problem in chromium (needed for 'sphinx_copybutton') */ +div.nbinput.container div.input_area > div[class^='highlight'], +div.nboutput.container div.output_area > div[class^='highlight']{ + overflow-y: hidden; +} + +/* hide copy button on prompts for 'sphinx_copybutton' extension ... */ +.prompt .copybtn, +/* ... 
and 'sphinx_immaterial' theme */ +.prompt .md-clipboard.md-icon { + display: none; +} + +/* Some additional styling taken form the Jupyter notebook CSS */ +.jp-RenderedHTMLCommon table, +div.rendered_html table { + border: none; + border-collapse: collapse; + border-spacing: 0; + color: black; + font-size: 12px; + table-layout: fixed; +} +.jp-RenderedHTMLCommon thead, +div.rendered_html thead { + border-bottom: 1px solid black; + vertical-align: bottom; +} +.jp-RenderedHTMLCommon tr, +.jp-RenderedHTMLCommon th, +.jp-RenderedHTMLCommon td, +div.rendered_html tr, +div.rendered_html th, +div.rendered_html td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +.jp-RenderedHTMLCommon th, +div.rendered_html th { + font-weight: bold; +} +.jp-RenderedHTMLCommon tbody tr:nth-child(odd), +div.rendered_html tbody tr:nth-child(odd) { + background: #f5f5f5; +} +.jp-RenderedHTMLCommon tbody tr:hover, +div.rendered_html tbody tr:hover { + background: rgba(66, 165, 245, 0.2); +} + diff --git a/_static/nbsphinx-gallery.css b/_static/nbsphinx-gallery.css new file mode 100644 index 0000000..365c27a --- /dev/null +++ b/_static/nbsphinx-gallery.css @@ -0,0 +1,31 @@ +.nbsphinx-gallery { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); + gap: 5px; + margin-top: 1em; + margin-bottom: 1em; +} + +.nbsphinx-gallery > a { + padding: 5px; + border: 1px dotted currentColor; + border-radius: 2px; + text-align: center; +} + +.nbsphinx-gallery > a:hover { + border-style: solid; +} + +.nbsphinx-gallery img { + max-width: 100%; + max-height: 100%; +} + +.nbsphinx-gallery > a > div:first-child { + display: flex; + align-items: start; + justify-content: center; + height: 120px; + margin-bottom: 5px; +} diff --git a/_static/nbsphinx-no-thumbnail.svg b/_static/nbsphinx-no-thumbnail.svg new file mode 100644 index 0000000..9dca758 --- /dev/null +++ b/_static/nbsphinx-no-thumbnail.svg @@ -0,0 +1,9 @@ + + + + diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 0000000..02b4b12 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,258 @@ +.highlight pre { line-height: 125%; } +.highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +.highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +.highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #8f5902; font-style: italic } /* Comment */ +.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */ +.highlight .g { color: #000000 } /* Generic */ +.highlight .k { color: #204a87; font-weight: bold } /* Keyword */ +.highlight .l { color: #000000 } /* Literal */ +.highlight .n { color: #000000 } /* Name */ +.highlight .o { color: #ce5c00; font-weight: bold } /* Operator */ +.highlight .x { color: #000000 } /* Other */ +.highlight .p { color: #000000; font-weight: bold } /* Punctuation */ +.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */ 
+.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #8f5902; font-style: italic } /* Comment.Preproc */ +.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #a40000 } /* Generic.Deleted */ +.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ +.highlight .ges { color: #000000; font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #ef2929 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #000000; font-style: italic } /* Generic.Output */ +.highlight .gp { color: #8f5902 } /* Generic.Prompt */ +.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */ +.highlight .kc { color: #204a87; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #204a87; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #204a87; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #204a87; font-weight: bold } /* Keyword.Pseudo */ +.highlight .kr { color: #204a87; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #204a87; font-weight: bold } /* Keyword.Type */ +.highlight .ld { color: #000000 } /* Literal.Date */ +.highlight .m { color: #0000cf; font-weight: bold } /* Literal.Number */ +.highlight .s { color: #4e9a06 } /* Literal.String */ +.highlight .na { color: #c4a000 } /* Name.Attribute */ +.highlight .nb { color: #204a87 } /* Name.Builtin */ +.highlight .nc { color: #000000 } /* Name.Class */ +.highlight .no { color: #000000 } /* Name.Constant */ +.highlight .nd { color: #5c35cc; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #ce5c00 } /* Name.Entity */ +.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #000000 } /* Name.Function */ +.highlight .nl { color: #f57900 } /* Name.Label */ +.highlight .nn { color: #000000 } /* Name.Namespace */ +.highlight .nx { color: #000000 } /* Name.Other */ +.highlight .py { color: #000000 } /* Name.Property */ +.highlight .nt { color: #204a87; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #000000 } /* Name.Variable */ +.highlight .ow { color: #204a87; font-weight: bold } /* Operator.Word */ +.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */ +.highlight .w { color: #f8f8f8 } /* Text.Whitespace */ +.highlight .mb { color: #0000cf; font-weight: bold } /* Literal.Number.Bin */ +.highlight .mf { color: #0000cf; font-weight: bold } /* Literal.Number.Float */ +.highlight .mh { color: #0000cf; font-weight: bold } /* Literal.Number.Hex */ +.highlight .mi { color: #0000cf; font-weight: bold } /* Literal.Number.Integer */ +.highlight .mo { color: #0000cf; font-weight: bold } /* Literal.Number.Oct */ +.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */ +.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */ +.highlight .sc { color: #4e9a06 } /* Literal.String.Char */ +.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */ +.highlight .sd { color: #8f5902; font-style: italic } /* 
Literal.String.Doc */ +.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */ +.highlight .se { color: #4e9a06 } /* Literal.String.Escape */ +.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */ +.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */ +.highlight .sx { color: #4e9a06 } /* Literal.String.Other */ +.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */ +.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */ +.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */ +.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #000000 } /* Name.Function.Magic */ +.highlight .vc { color: #000000 } /* Name.Variable.Class */ +.highlight .vg { color: #000000 } /* Name.Variable.Global */ +.highlight .vi { color: #000000 } /* Name.Variable.Instance */ +.highlight .vm { color: #000000 } /* Name.Variable.Magic */ +.highlight .il { color: #0000cf; font-weight: bold } /* Literal.Number.Integer.Long */ +@media not print { +body[data-theme="dark"] .highlight pre { line-height: 125%; } +body[data-theme="dark"] .highlight td.linenos .normal { color: #aaaaaa; background-color: transparent; padding-left: 5px; padding-right: 5px; } +body[data-theme="dark"] .highlight span.linenos { color: #aaaaaa; background-color: transparent; padding-left: 5px; padding-right: 5px; } +body[data-theme="dark"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +body[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +body[data-theme="dark"] .highlight .hll { background-color: #404040 } +body[data-theme="dark"] .highlight { background: #202020; color: #d0d0d0 } +body[data-theme="dark"] .highlight .c { color: #ababab; font-style: italic } /* Comment */ +body[data-theme="dark"] .highlight .err { color: #a61717; background-color: #e3d2d2 } /* Error */ +body[data-theme="dark"] .highlight .esc { color: #d0d0d0 } /* Escape */ +body[data-theme="dark"] .highlight .g { color: #d0d0d0 } /* Generic */ +body[data-theme="dark"] .highlight .k { color: #6ebf26; font-weight: bold } /* Keyword */ +body[data-theme="dark"] .highlight .l { color: #d0d0d0 } /* Literal */ +body[data-theme="dark"] .highlight .n { color: #d0d0d0 } /* Name */ +body[data-theme="dark"] .highlight .o { color: #d0d0d0 } /* Operator */ +body[data-theme="dark"] .highlight .x { color: #d0d0d0 } /* Other */ +body[data-theme="dark"] .highlight .p { color: #d0d0d0 } /* Punctuation */ +body[data-theme="dark"] .highlight .ch { color: #ababab; font-style: italic } /* Comment.Hashbang */ +body[data-theme="dark"] .highlight .cm { color: #ababab; font-style: italic } /* Comment.Multiline */ +body[data-theme="dark"] .highlight .cp { color: #ff3a3a; font-weight: bold } /* Comment.Preproc */ +body[data-theme="dark"] .highlight .cpf { color: #ababab; font-style: italic } /* Comment.PreprocFile */ +body[data-theme="dark"] .highlight .c1 { color: #ababab; font-style: italic } /* Comment.Single */ +body[data-theme="dark"] .highlight .cs { color: #e50808; font-weight: bold; background-color: #520000 } /* Comment.Special */ +body[data-theme="dark"] .highlight .gd { color: #ff3a3a } /* Generic.Deleted */ +body[data-theme="dark"] .highlight .ge { color: #d0d0d0; font-style: italic } /* Generic.Emph */ +body[data-theme="dark"] .highlight .ges { color: #d0d0d0; font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +body[data-theme="dark"] .highlight .gr { 
color: #ff3a3a } /* Generic.Error */ +body[data-theme="dark"] .highlight .gh { color: #ffffff; font-weight: bold } /* Generic.Heading */ +body[data-theme="dark"] .highlight .gi { color: #589819 } /* Generic.Inserted */ +body[data-theme="dark"] .highlight .go { color: #cccccc } /* Generic.Output */ +body[data-theme="dark"] .highlight .gp { color: #aaaaaa } /* Generic.Prompt */ +body[data-theme="dark"] .highlight .gs { color: #d0d0d0; font-weight: bold } /* Generic.Strong */ +body[data-theme="dark"] .highlight .gu { color: #ffffff; text-decoration: underline } /* Generic.Subheading */ +body[data-theme="dark"] .highlight .gt { color: #ff3a3a } /* Generic.Traceback */ +body[data-theme="dark"] .highlight .kc { color: #6ebf26; font-weight: bold } /* Keyword.Constant */ +body[data-theme="dark"] .highlight .kd { color: #6ebf26; font-weight: bold } /* Keyword.Declaration */ +body[data-theme="dark"] .highlight .kn { color: #6ebf26; font-weight: bold } /* Keyword.Namespace */ +body[data-theme="dark"] .highlight .kp { color: #6ebf26 } /* Keyword.Pseudo */ +body[data-theme="dark"] .highlight .kr { color: #6ebf26; font-weight: bold } /* Keyword.Reserved */ +body[data-theme="dark"] .highlight .kt { color: #6ebf26; font-weight: bold } /* Keyword.Type */ +body[data-theme="dark"] .highlight .ld { color: #d0d0d0 } /* Literal.Date */ +body[data-theme="dark"] .highlight .m { color: #51b2fd } /* Literal.Number */ +body[data-theme="dark"] .highlight .s { color: #ed9d13 } /* Literal.String */ +body[data-theme="dark"] .highlight .na { color: #bbbbbb } /* Name.Attribute */ +body[data-theme="dark"] .highlight .nb { color: #2fbccd } /* Name.Builtin */ +body[data-theme="dark"] .highlight .nc { color: #71adff; text-decoration: underline } /* Name.Class */ +body[data-theme="dark"] .highlight .no { color: #40ffff } /* Name.Constant */ +body[data-theme="dark"] .highlight .nd { color: #ffa500 } /* Name.Decorator */ +body[data-theme="dark"] .highlight .ni { color: #d0d0d0 } /* Name.Entity */ +body[data-theme="dark"] .highlight .ne { color: #bbbbbb } /* Name.Exception */ +body[data-theme="dark"] .highlight .nf { color: #71adff } /* Name.Function */ +body[data-theme="dark"] .highlight .nl { color: #d0d0d0 } /* Name.Label */ +body[data-theme="dark"] .highlight .nn { color: #71adff; text-decoration: underline } /* Name.Namespace */ +body[data-theme="dark"] .highlight .nx { color: #d0d0d0 } /* Name.Other */ +body[data-theme="dark"] .highlight .py { color: #d0d0d0 } /* Name.Property */ +body[data-theme="dark"] .highlight .nt { color: #6ebf26; font-weight: bold } /* Name.Tag */ +body[data-theme="dark"] .highlight .nv { color: #40ffff } /* Name.Variable */ +body[data-theme="dark"] .highlight .ow { color: #6ebf26; font-weight: bold } /* Operator.Word */ +body[data-theme="dark"] .highlight .pm { color: #d0d0d0 } /* Punctuation.Marker */ +body[data-theme="dark"] .highlight .w { color: #666666 } /* Text.Whitespace */ +body[data-theme="dark"] .highlight .mb { color: #51b2fd } /* Literal.Number.Bin */ +body[data-theme="dark"] .highlight .mf { color: #51b2fd } /* Literal.Number.Float */ +body[data-theme="dark"] .highlight .mh { color: #51b2fd } /* Literal.Number.Hex */ +body[data-theme="dark"] .highlight .mi { color: #51b2fd } /* Literal.Number.Integer */ +body[data-theme="dark"] .highlight .mo { color: #51b2fd } /* Literal.Number.Oct */ +body[data-theme="dark"] .highlight .sa { color: #ed9d13 } /* Literal.String.Affix */ +body[data-theme="dark"] .highlight .sb { color: #ed9d13 } /* Literal.String.Backtick */ +body[data-theme="dark"] 
.highlight .sc { color: #ed9d13 } /* Literal.String.Char */ +body[data-theme="dark"] .highlight .dl { color: #ed9d13 } /* Literal.String.Delimiter */ +body[data-theme="dark"] .highlight .sd { color: #ed9d13 } /* Literal.String.Doc */ +body[data-theme="dark"] .highlight .s2 { color: #ed9d13 } /* Literal.String.Double */ +body[data-theme="dark"] .highlight .se { color: #ed9d13 } /* Literal.String.Escape */ +body[data-theme="dark"] .highlight .sh { color: #ed9d13 } /* Literal.String.Heredoc */ +body[data-theme="dark"] .highlight .si { color: #ed9d13 } /* Literal.String.Interpol */ +body[data-theme="dark"] .highlight .sx { color: #ffa500 } /* Literal.String.Other */ +body[data-theme="dark"] .highlight .sr { color: #ed9d13 } /* Literal.String.Regex */ +body[data-theme="dark"] .highlight .s1 { color: #ed9d13 } /* Literal.String.Single */ +body[data-theme="dark"] .highlight .ss { color: #ed9d13 } /* Literal.String.Symbol */ +body[data-theme="dark"] .highlight .bp { color: #2fbccd } /* Name.Builtin.Pseudo */ +body[data-theme="dark"] .highlight .fm { color: #71adff } /* Name.Function.Magic */ +body[data-theme="dark"] .highlight .vc { color: #40ffff } /* Name.Variable.Class */ +body[data-theme="dark"] .highlight .vg { color: #40ffff } /* Name.Variable.Global */ +body[data-theme="dark"] .highlight .vi { color: #40ffff } /* Name.Variable.Instance */ +body[data-theme="dark"] .highlight .vm { color: #40ffff } /* Name.Variable.Magic */ +body[data-theme="dark"] .highlight .il { color: #51b2fd } /* Literal.Number.Integer.Long */ +@media (prefers-color-scheme: dark) { +body:not([data-theme="light"]) .highlight pre { line-height: 125%; } +body:not([data-theme="light"]) .highlight td.linenos .normal { color: #aaaaaa; background-color: transparent; padding-left: 5px; padding-right: 5px; } +body:not([data-theme="light"]) .highlight span.linenos { color: #aaaaaa; background-color: transparent; padding-left: 5px; padding-right: 5px; } +body:not([data-theme="light"]) .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +body:not([data-theme="light"]) .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +body:not([data-theme="light"]) .highlight .hll { background-color: #404040 } +body:not([data-theme="light"]) .highlight { background: #202020; color: #d0d0d0 } +body:not([data-theme="light"]) .highlight .c { color: #ababab; font-style: italic } /* Comment */ +body:not([data-theme="light"]) .highlight .err { color: #a61717; background-color: #e3d2d2 } /* Error */ +body:not([data-theme="light"]) .highlight .esc { color: #d0d0d0 } /* Escape */ +body:not([data-theme="light"]) .highlight .g { color: #d0d0d0 } /* Generic */ +body:not([data-theme="light"]) .highlight .k { color: #6ebf26; font-weight: bold } /* Keyword */ +body:not([data-theme="light"]) .highlight .l { color: #d0d0d0 } /* Literal */ +body:not([data-theme="light"]) .highlight .n { color: #d0d0d0 } /* Name */ +body:not([data-theme="light"]) .highlight .o { color: #d0d0d0 } /* Operator */ +body:not([data-theme="light"]) .highlight .x { color: #d0d0d0 } /* Other */ +body:not([data-theme="light"]) .highlight .p { color: #d0d0d0 } /* Punctuation */ +body:not([data-theme="light"]) .highlight .ch { color: #ababab; font-style: italic } /* Comment.Hashbang */ +body:not([data-theme="light"]) .highlight .cm { color: #ababab; font-style: italic } /* Comment.Multiline */ +body:not([data-theme="light"]) .highlight .cp { color: #ff3a3a; 
font-weight: bold } /* Comment.Preproc */ +body:not([data-theme="light"]) .highlight .cpf { color: #ababab; font-style: italic } /* Comment.PreprocFile */ +body:not([data-theme="light"]) .highlight .c1 { color: #ababab; font-style: italic } /* Comment.Single */ +body:not([data-theme="light"]) .highlight .cs { color: #e50808; font-weight: bold; background-color: #520000 } /* Comment.Special */ +body:not([data-theme="light"]) .highlight .gd { color: #ff3a3a } /* Generic.Deleted */ +body:not([data-theme="light"]) .highlight .ge { color: #d0d0d0; font-style: italic } /* Generic.Emph */ +body:not([data-theme="light"]) .highlight .ges { color: #d0d0d0; font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +body:not([data-theme="light"]) .highlight .gr { color: #ff3a3a } /* Generic.Error */ +body:not([data-theme="light"]) .highlight .gh { color: #ffffff; font-weight: bold } /* Generic.Heading */ +body:not([data-theme="light"]) .highlight .gi { color: #589819 } /* Generic.Inserted */ +body:not([data-theme="light"]) .highlight .go { color: #cccccc } /* Generic.Output */ +body:not([data-theme="light"]) .highlight .gp { color: #aaaaaa } /* Generic.Prompt */ +body:not([data-theme="light"]) .highlight .gs { color: #d0d0d0; font-weight: bold } /* Generic.Strong */ +body:not([data-theme="light"]) .highlight .gu { color: #ffffff; text-decoration: underline } /* Generic.Subheading */ +body:not([data-theme="light"]) .highlight .gt { color: #ff3a3a } /* Generic.Traceback */ +body:not([data-theme="light"]) .highlight .kc { color: #6ebf26; font-weight: bold } /* Keyword.Constant */ +body:not([data-theme="light"]) .highlight .kd { color: #6ebf26; font-weight: bold } /* Keyword.Declaration */ +body:not([data-theme="light"]) .highlight .kn { color: #6ebf26; font-weight: bold } /* Keyword.Namespace */ +body:not([data-theme="light"]) .highlight .kp { color: #6ebf26 } /* Keyword.Pseudo */ +body:not([data-theme="light"]) .highlight .kr { color: #6ebf26; font-weight: bold } /* Keyword.Reserved */ +body:not([data-theme="light"]) .highlight .kt { color: #6ebf26; font-weight: bold } /* Keyword.Type */ +body:not([data-theme="light"]) .highlight .ld { color: #d0d0d0 } /* Literal.Date */ +body:not([data-theme="light"]) .highlight .m { color: #51b2fd } /* Literal.Number */ +body:not([data-theme="light"]) .highlight .s { color: #ed9d13 } /* Literal.String */ +body:not([data-theme="light"]) .highlight .na { color: #bbbbbb } /* Name.Attribute */ +body:not([data-theme="light"]) .highlight .nb { color: #2fbccd } /* Name.Builtin */ +body:not([data-theme="light"]) .highlight .nc { color: #71adff; text-decoration: underline } /* Name.Class */ +body:not([data-theme="light"]) .highlight .no { color: #40ffff } /* Name.Constant */ +body:not([data-theme="light"]) .highlight .nd { color: #ffa500 } /* Name.Decorator */ +body:not([data-theme="light"]) .highlight .ni { color: #d0d0d0 } /* Name.Entity */ +body:not([data-theme="light"]) .highlight .ne { color: #bbbbbb } /* Name.Exception */ +body:not([data-theme="light"]) .highlight .nf { color: #71adff } /* Name.Function */ +body:not([data-theme="light"]) .highlight .nl { color: #d0d0d0 } /* Name.Label */ +body:not([data-theme="light"]) .highlight .nn { color: #71adff; text-decoration: underline } /* Name.Namespace */ +body:not([data-theme="light"]) .highlight .nx { color: #d0d0d0 } /* Name.Other */ +body:not([data-theme="light"]) .highlight .py { color: #d0d0d0 } /* Name.Property */ +body:not([data-theme="light"]) .highlight .nt { color: #6ebf26; font-weight: bold } /* Name.Tag */ 
+body:not([data-theme="light"]) .highlight .nv { color: #40ffff } /* Name.Variable */ +body:not([data-theme="light"]) .highlight .ow { color: #6ebf26; font-weight: bold } /* Operator.Word */ +body:not([data-theme="light"]) .highlight .pm { color: #d0d0d0 } /* Punctuation.Marker */ +body:not([data-theme="light"]) .highlight .w { color: #666666 } /* Text.Whitespace */ +body:not([data-theme="light"]) .highlight .mb { color: #51b2fd } /* Literal.Number.Bin */ +body:not([data-theme="light"]) .highlight .mf { color: #51b2fd } /* Literal.Number.Float */ +body:not([data-theme="light"]) .highlight .mh { color: #51b2fd } /* Literal.Number.Hex */ +body:not([data-theme="light"]) .highlight .mi { color: #51b2fd } /* Literal.Number.Integer */ +body:not([data-theme="light"]) .highlight .mo { color: #51b2fd } /* Literal.Number.Oct */ +body:not([data-theme="light"]) .highlight .sa { color: #ed9d13 } /* Literal.String.Affix */ +body:not([data-theme="light"]) .highlight .sb { color: #ed9d13 } /* Literal.String.Backtick */ +body:not([data-theme="light"]) .highlight .sc { color: #ed9d13 } /* Literal.String.Char */ +body:not([data-theme="light"]) .highlight .dl { color: #ed9d13 } /* Literal.String.Delimiter */ +body:not([data-theme="light"]) .highlight .sd { color: #ed9d13 } /* Literal.String.Doc */ +body:not([data-theme="light"]) .highlight .s2 { color: #ed9d13 } /* Literal.String.Double */ +body:not([data-theme="light"]) .highlight .se { color: #ed9d13 } /* Literal.String.Escape */ +body:not([data-theme="light"]) .highlight .sh { color: #ed9d13 } /* Literal.String.Heredoc */ +body:not([data-theme="light"]) .highlight .si { color: #ed9d13 } /* Literal.String.Interpol */ +body:not([data-theme="light"]) .highlight .sx { color: #ffa500 } /* Literal.String.Other */ +body:not([data-theme="light"]) .highlight .sr { color: #ed9d13 } /* Literal.String.Regex */ +body:not([data-theme="light"]) .highlight .s1 { color: #ed9d13 } /* Literal.String.Single */ +body:not([data-theme="light"]) .highlight .ss { color: #ed9d13 } /* Literal.String.Symbol */ +body:not([data-theme="light"]) .highlight .bp { color: #2fbccd } /* Name.Builtin.Pseudo */ +body:not([data-theme="light"]) .highlight .fm { color: #71adff } /* Name.Function.Magic */ +body:not([data-theme="light"]) .highlight .vc { color: #40ffff } /* Name.Variable.Class */ +body:not([data-theme="light"]) .highlight .vg { color: #40ffff } /* Name.Variable.Global */ +body:not([data-theme="light"]) .highlight .vi { color: #40ffff } /* Name.Variable.Instance */ +body:not([data-theme="light"]) .highlight .vm { color: #40ffff } /* Name.Variable.Magic */ +body:not([data-theme="light"]) .highlight .il { color: #51b2fd } /* Literal.Number.Integer.Long */ +} +} \ No newline at end of file diff --git a/_static/scripts/furo-extensions.js b/_static/scripts/furo-extensions.js new file mode 100644 index 0000000..e69de29 diff --git a/_static/scripts/furo.js b/_static/scripts/furo.js new file mode 100644 index 0000000..0abb2af --- /dev/null +++ b/_static/scripts/furo.js @@ -0,0 +1,3 @@ +/*! 
For license information please see furo.js.LICENSE.txt */ +(()=>{var t={856:function(t,e,n){var o,r;r=void 0!==n.g?n.g:"undefined"!=typeof window?window:this,o=function(){return function(t){"use strict";var e={navClass:"active",contentClass:"active",nested:!1,nestedClass:"active",offset:0,reflow:!1,events:!0},n=function(t,e,n){if(n.settings.events){var o=new CustomEvent(t,{bubbles:!0,cancelable:!0,detail:n});e.dispatchEvent(o)}},o=function(t){var e=0;if(t.offsetParent)for(;t;)e+=t.offsetTop,t=t.offsetParent;return e>=0?e:0},r=function(t){t&&t.sort((function(t,e){return o(t.content)=Math.max(document.body.scrollHeight,document.documentElement.scrollHeight,document.body.offsetHeight,document.documentElement.offsetHeight,document.body.clientHeight,document.documentElement.clientHeight)},l=function(t,e){var n=t[t.length-1];if(function(t,e){return!(!s()||!c(t.content,e,!0))}(n,e))return n;for(var o=t.length-1;o>=0;o--)if(c(t[o].content,e))return t[o]},a=function(t,e){if(e.nested&&t.parentNode){var n=t.parentNode.closest("li");n&&(n.classList.remove(e.nestedClass),a(n,e))}},i=function(t,e){if(t){var o=t.nav.closest("li");o&&(o.classList.remove(e.navClass),t.content.classList.remove(e.contentClass),a(o,e),n("gumshoeDeactivate",o,{link:t.nav,content:t.content,settings:e}))}},u=function(t,e){if(e.nested){var n=t.parentNode.closest("li");n&&(n.classList.add(e.nestedClass),u(n,e))}};return function(o,c){var s,a,d,f,m,v={setup:function(){s=document.querySelectorAll(o),a=[],Array.prototype.forEach.call(s,(function(t){var e=document.getElementById(decodeURIComponent(t.hash.substr(1)));e&&a.push({nav:t,content:e})})),r(a)},detect:function(){var t=l(a,m);t?d&&t.content===d.content||(i(d,m),function(t,e){if(t){var o=t.nav.closest("li");o&&(o.classList.add(e.navClass),t.content.classList.add(e.contentClass),u(o,e),n("gumshoeActivate",o,{link:t.nav,content:t.content,settings:e}))}}(t,m),d=t):d&&(i(d,m),d=null)}},h=function(e){f&&t.cancelAnimationFrame(f),f=t.requestAnimationFrame(v.detect)},g=function(e){f&&t.cancelAnimationFrame(f),f=t.requestAnimationFrame((function(){r(a),v.detect()}))};return v.destroy=function(){d&&i(d,m),t.removeEventListener("scroll",h,!1),m.reflow&&t.removeEventListener("resize",g,!1),a=null,s=null,d=null,f=null,m=null},m=function(){var t={};return Array.prototype.forEach.call(arguments,(function(e){for(var n in e){if(!e.hasOwnProperty(n))return;t[n]=e[n]}})),t}(e,c||{}),v.setup(),v.detect(),t.addEventListener("scroll",h,!1),m.reflow&&t.addEventListener("resize",g,!1),v}}(r)}.apply(e,[]),void 0===o||(t.exports=o)}},e={};function n(o){var r=e[o];if(void 0!==r)return r.exports;var c=e[o]={exports:{}};return t[o].call(c.exports,c,c.exports,n),c.exports}n.n=t=>{var e=t&&t.__esModule?()=>t.default:()=>t;return n.d(e,{a:e}),e},n.d=(t,e)=>{for(var o in e)n.o(e,o)&&!n.o(t,o)&&Object.defineProperty(t,o,{enumerable:!0,get:e[o]})},n.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(t){if("object"==typeof window)return window}}(),n.o=(t,e)=>Object.prototype.hasOwnProperty.call(t,e),(()=>{"use strict";var t=n(856),e=n.n(t),o=null,r=null,c=document.documentElement.scrollTop;const s=64;function l(){const t=localStorage.getItem("theme")||"auto";var e;"light"!==(e=window.matchMedia("(prefers-color-scheme: dark)").matches?"auto"===t?"light":"light"==t?"dark":"auto":"auto"===t?"dark":"dark"==t?"light":"auto")&&"dark"!==e&&"auto"!==e&&(console.error(`Got invalid theme mode: ${e}. 
Resetting to auto.`),e="auto"),document.body.dataset.theme=e,localStorage.setItem("theme",e),console.log(`Changed to ${e} mode.`)}function a(){!function(){const t=document.getElementsByClassName("theme-toggle");Array.from(t).forEach((t=>{t.addEventListener("click",l)}))}(),function(){let t=0,e=!1;window.addEventListener("scroll",(function(n){t=window.scrollY,e||(window.requestAnimationFrame((function(){var n;(function(t){const e=Math.floor(r.getBoundingClientRect().top);console.log(`headerTop: ${e}`),0==e&&t!=e?r.classList.add("scrolled"):r.classList.remove("scrolled")})(n=t),function(t){tc&&document.documentElement.classList.remove("show-back-to-top"),c=t}(n),function(t){null!==o&&(0==t?o.scrollTo(0,0):Math.ceil(t)>=Math.floor(document.documentElement.scrollHeight-window.innerHeight)?o.scrollTo(0,o.scrollHeight):document.querySelector(".scroll-current"))}(n),e=!1})),e=!0)})),window.scroll()}(),null!==o&&new(e())(".toc-tree a",{reflow:!0,recursive:!0,navClass:"scroll-current",offset:()=>{let t=parseFloat(getComputedStyle(document.documentElement).fontSize);return r.getBoundingClientRect().height+2.5*t+1}})}document.addEventListener("DOMContentLoaded",(function(){document.body.parentNode.classList.remove("no-js"),r=document.querySelector("header"),o=document.querySelector(".toc-scroll"),a()}))})()})(); +//# sourceMappingURL=furo.js.map \ No newline at end of file diff --git a/_static/scripts/furo.js.LICENSE.txt b/_static/scripts/furo.js.LICENSE.txt new file mode 100644 index 0000000..1632189 --- /dev/null +++ b/_static/scripts/furo.js.LICENSE.txt @@ -0,0 +1,7 @@ +/*! + * gumshoejs v5.1.2 (patched by @pradyunsg) + * A simple, framework-agnostic scrollspy script. + * (c) 2019 Chris Ferdinandi + * MIT License + * http://github.com/cferdinandi/gumshoe + */ diff --git a/_static/scripts/furo.js.map b/_static/scripts/furo.js.map new file mode 100644 index 0000000..80ea12b --- /dev/null +++ b/_static/scripts/furo.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"scripts/furo.js","mappings":";iCAAA,MAQWA,SAWS,IAAX,EAAAC,EACH,EAAAA,EACkB,oBAAXC,OACLA,OACAC,KAbO,EAAF,WACP,OAaJ,SAAUD,GACR,aAMA,IAAIE,EAAW,CAEbC,SAAU,SACVC,aAAc,SAGdC,QAAQ,EACRC,YAAa,SAGbC,OAAQ,EACRC,QAAQ,EAGRC,QAAQ,GA6BNC,EAAY,SAAUC,EAAMC,EAAMC,GAEpC,GAAKA,EAAOC,SAASL,OAArB,CAGA,IAAIM,EAAQ,IAAIC,YAAYL,EAAM,CAChCM,SAAS,EACTC,YAAY,EACZL,OAAQA,IAIVD,EAAKO,cAAcJ,EAVgB,CAWrC,EAOIK,EAAe,SAAUR,GAC3B,IAAIS,EAAW,EACf,GAAIT,EAAKU,aACP,KAAOV,GACLS,GAAYT,EAAKW,UACjBX,EAAOA,EAAKU,aAGhB,OAAOD,GAAY,EAAIA,EAAW,CACpC,EAMIG,EAAe,SAAUC,GACvBA,GACFA,EAASC,MAAK,SAAUC,EAAOC,GAG7B,OAFcR,EAAaO,EAAME,SACnBT,EAAaQ,EAAMC,UACF,EACxB,CACT,GAEJ,EAwCIC,EAAW,SAAUlB,EAAME,EAAUiB,GACvC,IAAIC,EAASpB,EAAKqB,wBACd1B,EAnCU,SAAUO,GAExB,MAA+B,mBAApBA,EAASP,OACX2B,WAAWpB,EAASP,UAItB2B,WAAWpB,EAASP,OAC7B,CA2Be4B,CAAUrB,GACvB,OAAIiB,EAEAK,SAASJ,EAAOD,OAAQ,KACvB/B,EAAOqC,aAAeC,SAASC,gBAAgBC,cAG7CJ,SAASJ,EAAOS,IAAK,KAAOlC,CACrC,EAMImC,EAAa,WACf,OACEC,KAAKC,KAAK5C,EAAOqC,YAAcrC,EAAO6C,cAnCjCF,KAAKG,IACVR,SAASS,KAAKC,aACdV,SAASC,gBAAgBS,aACzBV,SAASS,KAAKE,aACdX,SAASC,gBAAgBU,aACzBX,SAASS,KAAKP,aACdF,SAASC,gBAAgBC,aAkC7B,EAmBIU,EAAY,SAAUzB,EAAUX,GAClC,IAAIqC,EAAO1B,EAASA,EAAS2B,OAAS,GACtC,GAbgB,SAAUC,EAAMvC,GAChC,SAAI4B,MAAgBZ,EAASuB,EAAKxB,QAASf,GAAU,GAEvD,CAUMwC,CAAYH,EAAMrC,GAAW,OAAOqC,EACxC,IAAK,IAAII,EAAI9B,EAAS2B,OAAS,EAAGG,GAAK,EAAGA,IACxC,GAAIzB,EAASL,EAAS8B,GAAG1B,QAASf,GAAW,OAAOW,EAAS8B,EAEjE,EAOIC,EAAmB,SAAUC,EAAK3C,GAEpC,GAAKA,EAAST,QAAWoD,EAAIC,WAA7B,CAGA,IAAIC,EAAKF,EAAIC,WAAWE,QAAQ,MAC3BD,IAGLA,EAAGE,UAAUC,OAAOhD,EAASR,aAG7BkD,EAAiBG,EAAI7C,GAV0B,CAWjD,EAOIiD,EAAa,SAAUC,EAAOlD,GAEhC,GAAKkD,EAAL,CAGA,IAAIL,EAAKK,EAAMP,IAAIG,QAAQ,MACtBD,IAGLA,EAAGE,UAAUC,OAAOhD,EAASX,UAC7B6D,EAAMnC,QAAQgC,UAAUC,OAAOhD,EAASV,cAGxCoD,EAAiBG,EAAI7C,GAGrBJ,EAAU,oBAAqBiD,EAAI,CACjCM,KAAMD,EAAMP,IACZ5B,QAASmC,EAAMnC,QACff,SAAUA,IAjBM,CAmBpB,EAOIoD,EAAiB,SAAUT,EAAK3C,GAElC,GAAKA,EAAST,OAAd,CAGA,IAAIsD,EAAKF,EAAIC,WAAWE,QAAQ,MAC3BD,IAGLA,EAAGE,UAAUM,IAAIrD,EAASR,aAG1B4D,EAAeP,EAAI7C,GAVS,CAW9B,EA6LA,OA1JkB,SAAUsD,EAAUC,GAKpC,IACIC,EAAU7C,EAAU8C,EAASC,EAAS1D,EADtC2D,EAAa,CAUjBA,MAAmB,WAEjBH,EAAWhC,SAASoC,iBAAiBN,GAGrC3C,EAAW,GAGXkD,MAAMC,UAAUC,QAAQC,KAAKR,GAAU,SAAUjB,GAE/C,IAAIxB,EAAUS,SAASyC,eACrBC,mBAAmB3B,EAAK4B,KAAKC,OAAO,KAEjCrD,GAGLJ,EAAS0D,KAAK,CACZ1B,IAAKJ,EACLxB,QAASA,GAEb,IAGAL,EAAaC,EACf,EAKAgD,OAAoB,WAElB,IAAIW,EAASlC,EAAUzB,EAAUX,GAG5BsE,EASDb,GAAWa,EAAOvD,UAAY0C,EAAQ1C,UAG1CkC,EAAWQ,EAASzD,GAzFT,SAAUkD,EAAOlD,GAE9B,GAAKkD,EAAL,CAGA,IAAIL,EAAKK,EAAMP,IAAIG,QAAQ,MACtBD,IAGLA,EAAGE,UAAUM,IAAIrD,EAASX,UAC1B6D,EAAMnC,QAAQgC,UAAUM,IAAIrD,EAASV,cAGrC8D,EAAeP,EAAI7C,GAGnBJ,EAAU,kBAAmBiD,EAAI,CAC/BM,KAAMD,EAAMP,IACZ5B,QAASmC,EAAMnC,QACff,SAAUA,IAjBM,CAmBpB,CAqEIuE,CAASD,EAAQtE,GAGjByD,EAAUa,GAfJb,IACFR,EAAWQ,EAASzD,GACpByD,EAAU,KAchB,GAMIe,EAAgB,SAAUvE,GAExByD,GACFxE,EAAOuF,qBAAqBf,GAI9BA,EAAUxE,EAAOwF,sBAAsBf,EAAWgB,OACpD,EAMIC,EAAgB,SAAU3E,GAExByD,GACFxE,EAAOuF,qBAAqBf,GAI9BA,EAAUxE,EAAOwF,uBAAsB,WACrChE,EAAaC,GACbgD,EAAWgB,QACb,GACF,EAkDA,OA7CAhB,EAAWkB,QAAU,WAEfpB,GACFR,EAAWQ,EAASzD,GAItBd,EAAO4F,oBAAoB,SAAUN,GAAe,GAChDxE,EAASN,QACXR,EAAO4F,oBAAoB,SAAUF,GAAe,GAItDjE,EAAW,KACX6C,EAAW,KACXC,EAAU,KACVC,EAAU,KACV1D,EAAW,IACb,EAOEA,EA3XS,WACX,IAAI+E,EAAS,CAAC,EAOd,OANAlB,MAAMC,UAAUC,QAAQC,KAAKgB,WAAW,SAAUC,GAChD,IAAK,IAAIC,KAAOD,EAAK,CACnB,IAAKA,EAAIE,eAAeD,GAAM,OAC9BH,EAAOG,GAAOD,EAAIC,EACpB,CACF,IACOH,CACT,CAkXeK,CAAOhG,EAAUmE,GAAW,CAAC,GAGxCI,EAAW0B,QAGX1B,EAAWgB,SAGXzF,EAAOoG,iBAAiB,SAAUd,GAAe,GAC7CxE,EAASN,QACXR,EAAOoG,iBAAiB,SAAUV,GAAe,GAS9CjB,CACT,CAOF,CArcW4B,CAAQvG,EAChB,UAFM,SAEN,uBCXDwG,EAA2B,CAAC,EAGhC,SAASC,E
AAoBC,GAE5B,IAAIC,EAAeH,EAAyBE,GAC5C,QAAqBE,IAAjBD,EACH,OAAOA,EAAaE,QAGrB,IAAIC,EAASN,EAAyBE,GAAY,CAGjDG,QAAS,CAAC,GAOX,OAHAE,EAAoBL,GAAU1B,KAAK8B,EAAOD,QAASC,EAAQA,EAAOD,QAASJ,GAGpEK,EAAOD,OACf,CCrBAJ,EAAoBO,EAAKF,IACxB,IAAIG,EAASH,GAAUA,EAAOI,WAC7B,IAAOJ,EAAiB,QACxB,IAAM,EAEP,OADAL,EAAoBU,EAAEF,EAAQ,CAAEG,EAAGH,IAC5BA,CAAM,ECLdR,EAAoBU,EAAI,CAACN,EAASQ,KACjC,IAAI,IAAInB,KAAOmB,EACXZ,EAAoBa,EAAED,EAAYnB,KAASO,EAAoBa,EAAET,EAASX,IAC5EqB,OAAOC,eAAeX,EAASX,EAAK,CAAEuB,YAAY,EAAMC,IAAKL,EAAWnB,IAE1E,ECNDO,EAAoBxG,EAAI,WACvB,GAA0B,iBAAf0H,WAAyB,OAAOA,WAC3C,IACC,OAAOxH,MAAQ,IAAIyH,SAAS,cAAb,EAChB,CAAE,MAAOC,GACR,GAAsB,iBAAX3H,OAAqB,OAAOA,MACxC,CACA,CAPuB,GCAxBuG,EAAoBa,EAAI,CAACrB,EAAK6B,IAAUP,OAAOzC,UAAUqB,eAAenB,KAAKiB,EAAK6B,4CCK9EC,EAAY,KACZC,EAAS,KACTC,EAAgBzF,SAASC,gBAAgByF,UAC7C,MAAMC,EAAmB,GA8EzB,SAASC,IACP,MAAMC,EAAeC,aAAaC,QAAQ,UAAY,OAZxD,IAAkBC,EACH,WADGA,EAaItI,OAAOuI,WAAW,gCAAgCC,QAI/C,SAAjBL,EACO,QACgB,SAAhBA,EACA,OAEA,OAIU,SAAjBA,EACO,OACgB,QAAhBA,EACA,QAEA,SA9BoB,SAATG,GAA4B,SAATA,IACzCG,QAAQC,MAAM,2BAA2BJ,yBACzCA,EAAO,QAGThG,SAASS,KAAK4F,QAAQC,MAAQN,EAC9BF,aAAaS,QAAQ,QAASP,GAC9BG,QAAQK,IAAI,cAAcR,UA0B5B,CAkDA,SAASnC,KART,WAEE,MAAM4C,EAAUzG,SAAS0G,uBAAuB,gBAChDrE,MAAMsE,KAAKF,GAASlE,SAASqE,IAC3BA,EAAI9C,iBAAiB,QAAS8B,EAAe,GAEjD,CAGEiB,GA9CF,WAEE,IAAIC,EAA6B,EAC7BC,GAAU,EAEdrJ,OAAOoG,iBAAiB,UAAU,SAAUuB,GAC1CyB,EAA6BpJ,OAAOsJ,QAE/BD,IACHrJ,OAAOwF,uBAAsB,WAzDnC,IAAuB+D,GAxDvB,SAAgCA,GAC9B,MAAMC,EAAY7G,KAAK8G,MAAM3B,EAAO7F,wBAAwBQ,KAE5DgG,QAAQK,IAAI,cAAcU,KACT,GAAbA,GAAkBD,GAAaC,EACjC1B,EAAOjE,UAAUM,IAAI,YAErB2D,EAAOjE,UAAUC,OAAO,WAE5B,EAgDE4F,CADqBH,EA0DDH,GAvGtB,SAAmCG,GAC7BA,EAAYtB,EACd3F,SAASC,gBAAgBsB,UAAUC,OAAO,oBAEtCyF,EAAYxB,EACdzF,SAASC,gBAAgBsB,UAAUM,IAAI,oBAC9BoF,EAAYxB,GACrBzF,SAASC,gBAAgBsB,UAAUC,OAAO,oBAG9CiE,EAAgBwB,CAClB,CAoCEI,CAA0BJ,GAlC5B,SAA6BA,GACT,OAAd1B,IAKa,GAAb0B,EACF1B,EAAU+B,SAAS,EAAG,GAGtBjH,KAAKC,KAAK2G,IACV5G,KAAK8G,MAAMnH,SAASC,gBAAgBS,aAAehD,OAAOqC,aAE1DwF,EAAU+B,SAAS,EAAG/B,EAAU7E,cAGhBV,SAASuH,cAAc,mBAc3C,CAKEC,CAAoBP,GAwDdF,GAAU,CACZ,IAEAA,GAAU,EAEd,IACArJ,OAAO+J,QACT,CA6BEC,GA1BkB,OAAdnC,GAKJ,IAAI,IAAJ,CAAY,cAAe,CACzBrH,QAAQ,EACRyJ,WAAW,EACX9J,SAAU,iBACVI,OAAQ,KACN,IAAI2J,EAAMhI,WAAWiI,iBAAiB7H,SAASC,iBAAiB6H,UAChE,OAAOtC,EAAO7F,wBAAwBoI,OAAS,IAAMH,EAAM,CAAC,GAiBlE,CAcA5H,SAAS8D,iBAAiB,oBAT1B,WACE9D,SAASS,KAAKW,WAAWG,UAAUC,OAAO,SAE1CgE,EAASxF,SAASuH,cAAc,UAChChC,EAAYvF,SAASuH,cAAc,eAEnC1D,GACF","sources":["webpack:///./src/furo/assets/scripts/gumshoe-patched.js","webpack:///webpack/bootstrap","webpack:///webpack/runtime/compat get default export","webpack:///webpack/runtime/define property getters","webpack:///webpack/runtime/global","webpack:///webpack/runtime/hasOwnProperty shorthand","webpack:///./src/furo/assets/scripts/furo.js"],"sourcesContent":["/*!\n * gumshoejs v5.1.2 (patched by @pradyunsg)\n * A simple, framework-agnostic scrollspy script.\n * (c) 2019 Chris Ferdinandi\n * MIT License\n * http://github.com/cferdinandi/gumshoe\n */\n\n(function (root, factory) {\n if (typeof define === \"function\" && define.amd) {\n define([], function () {\n return factory(root);\n });\n } else if (typeof exports === \"object\") {\n module.exports = factory(root);\n } else {\n root.Gumshoe = factory(root);\n }\n})(\n typeof global !== \"undefined\"\n ? global\n : typeof window !== \"undefined\"\n ? 
window\n : this,\n function (window) {\n \"use strict\";\n\n //\n // Defaults\n //\n\n var defaults = {\n // Active classes\n navClass: \"active\",\n contentClass: \"active\",\n\n // Nested navigation\n nested: false,\n nestedClass: \"active\",\n\n // Offset & reflow\n offset: 0,\n reflow: false,\n\n // Event support\n events: true,\n };\n\n //\n // Methods\n //\n\n /**\n * Merge two or more objects together.\n * @param {Object} objects The objects to merge together\n * @returns {Object} Merged values of defaults and options\n */\n var extend = function () {\n var merged = {};\n Array.prototype.forEach.call(arguments, function (obj) {\n for (var key in obj) {\n if (!obj.hasOwnProperty(key)) return;\n merged[key] = obj[key];\n }\n });\n return merged;\n };\n\n /**\n * Emit a custom event\n * @param {String} type The event type\n * @param {Node} elem The element to attach the event to\n * @param {Object} detail Any details to pass along with the event\n */\n var emitEvent = function (type, elem, detail) {\n // Make sure events are enabled\n if (!detail.settings.events) return;\n\n // Create a new event\n var event = new CustomEvent(type, {\n bubbles: true,\n cancelable: true,\n detail: detail,\n });\n\n // Dispatch the event\n elem.dispatchEvent(event);\n };\n\n /**\n * Get an element's distance from the top of the Document.\n * @param {Node} elem The element\n * @return {Number} Distance from the top in pixels\n */\n var getOffsetTop = function (elem) {\n var location = 0;\n if (elem.offsetParent) {\n while (elem) {\n location += elem.offsetTop;\n elem = elem.offsetParent;\n }\n }\n return location >= 0 ? location : 0;\n };\n\n /**\n * Sort content from first to last in the DOM\n * @param {Array} contents The content areas\n */\n var sortContents = function (contents) {\n if (contents) {\n contents.sort(function (item1, item2) {\n var offset1 = getOffsetTop(item1.content);\n var offset2 = getOffsetTop(item2.content);\n if (offset1 < offset2) return -1;\n return 1;\n });\n }\n };\n\n /**\n * Get the offset to use for calculating position\n * @param {Object} settings The settings for this instantiation\n * @return {Float} The number of pixels to offset the calculations\n */\n var getOffset = function (settings) {\n // if the offset is a function run it\n if (typeof settings.offset === \"function\") {\n return parseFloat(settings.offset());\n }\n\n // Otherwise, return it as-is\n return parseFloat(settings.offset);\n };\n\n /**\n * Get the document element's height\n * @private\n * @returns {Number}\n */\n var getDocumentHeight = function () {\n return Math.max(\n document.body.scrollHeight,\n document.documentElement.scrollHeight,\n document.body.offsetHeight,\n document.documentElement.offsetHeight,\n document.body.clientHeight,\n document.documentElement.clientHeight,\n );\n };\n\n /**\n * Determine if an element is in view\n * @param {Node} elem The element\n * @param {Object} settings The settings for this instantiation\n * @param {Boolean} bottom If true, check if element is above bottom of viewport instead\n * @return {Boolean} Returns true if element is in the viewport\n */\n var isInView = function (elem, settings, bottom) {\n var bounds = elem.getBoundingClientRect();\n var offset = getOffset(settings);\n if (bottom) {\n return (\n parseInt(bounds.bottom, 10) <\n (window.innerHeight || document.documentElement.clientHeight)\n );\n }\n return parseInt(bounds.top, 10) <= offset;\n };\n\n /**\n * Check if at the bottom of the viewport\n * @return {Boolean} If true, page is at the bottom 
of the viewport\n */\n var isAtBottom = function () {\n if (\n Math.ceil(window.innerHeight + window.pageYOffset) >=\n getDocumentHeight()\n )\n return true;\n return false;\n };\n\n /**\n * Check if the last item should be used (even if not at the top of the page)\n * @param {Object} item The last item\n * @param {Object} settings The settings for this instantiation\n * @return {Boolean} If true, use the last item\n */\n var useLastItem = function (item, settings) {\n if (isAtBottom() && isInView(item.content, settings, true)) return true;\n return false;\n };\n\n /**\n * Get the active content\n * @param {Array} contents The content areas\n * @param {Object} settings The settings for this instantiation\n * @return {Object} The content area and matching navigation link\n */\n var getActive = function (contents, settings) {\n var last = contents[contents.length - 1];\n if (useLastItem(last, settings)) return last;\n for (var i = contents.length - 1; i >= 0; i--) {\n if (isInView(contents[i].content, settings)) return contents[i];\n }\n };\n\n /**\n * Deactivate parent navs in a nested navigation\n * @param {Node} nav The starting navigation element\n * @param {Object} settings The settings for this instantiation\n */\n var deactivateNested = function (nav, settings) {\n // If nesting isn't activated, bail\n if (!settings.nested || !nav.parentNode) return;\n\n // Get the parent navigation\n var li = nav.parentNode.closest(\"li\");\n if (!li) return;\n\n // Remove the active class\n li.classList.remove(settings.nestedClass);\n\n // Apply recursively to any parent navigation elements\n deactivateNested(li, settings);\n };\n\n /**\n * Deactivate a nav and content area\n * @param {Object} items The nav item and content to deactivate\n * @param {Object} settings The settings for this instantiation\n */\n var deactivate = function (items, settings) {\n // Make sure there are items to deactivate\n if (!items) return;\n\n // Get the parent list item\n var li = items.nav.closest(\"li\");\n if (!li) return;\n\n // Remove the active class from the nav and content\n li.classList.remove(settings.navClass);\n items.content.classList.remove(settings.contentClass);\n\n // Deactivate any parent navs in a nested navigation\n deactivateNested(li, settings);\n\n // Emit a custom event\n emitEvent(\"gumshoeDeactivate\", li, {\n link: items.nav,\n content: items.content,\n settings: settings,\n });\n };\n\n /**\n * Activate parent navs in a nested navigation\n * @param {Node} nav The starting navigation element\n * @param {Object} settings The settings for this instantiation\n */\n var activateNested = function (nav, settings) {\n // If nesting isn't activated, bail\n if (!settings.nested) return;\n\n // Get the parent navigation\n var li = nav.parentNode.closest(\"li\");\n if (!li) return;\n\n // Add the active class\n li.classList.add(settings.nestedClass);\n\n // Apply recursively to any parent navigation elements\n activateNested(li, settings);\n };\n\n /**\n * Activate a nav and content area\n * @param {Object} items The nav item and content to activate\n * @param {Object} settings The settings for this instantiation\n */\n var activate = function (items, settings) {\n // Make sure there are items to activate\n if (!items) return;\n\n // Get the parent list item\n var li = items.nav.closest(\"li\");\n if (!li) return;\n\n // Add the active class to the nav and content\n li.classList.add(settings.navClass);\n items.content.classList.add(settings.contentClass);\n\n // Activate any parent navs in a nested 
navigation\n activateNested(li, settings);\n\n // Emit a custom event\n emitEvent(\"gumshoeActivate\", li, {\n link: items.nav,\n content: items.content,\n settings: settings,\n });\n };\n\n /**\n * Create the Constructor object\n * @param {String} selector The selector to use for navigation items\n * @param {Object} options User options and settings\n */\n var Constructor = function (selector, options) {\n //\n // Variables\n //\n\n var publicAPIs = {};\n var navItems, contents, current, timeout, settings;\n\n //\n // Methods\n //\n\n /**\n * Set variables from DOM elements\n */\n publicAPIs.setup = function () {\n // Get all nav items\n navItems = document.querySelectorAll(selector);\n\n // Create contents array\n contents = [];\n\n // Loop through each item, get it's matching content, and push to the array\n Array.prototype.forEach.call(navItems, function (item) {\n // Get the content for the nav item\n var content = document.getElementById(\n decodeURIComponent(item.hash.substr(1)),\n );\n if (!content) return;\n\n // Push to the contents array\n contents.push({\n nav: item,\n content: content,\n });\n });\n\n // Sort contents by the order they appear in the DOM\n sortContents(contents);\n };\n\n /**\n * Detect which content is currently active\n */\n publicAPIs.detect = function () {\n // Get the active content\n var active = getActive(contents, settings);\n\n // if there's no active content, deactivate and bail\n if (!active) {\n if (current) {\n deactivate(current, settings);\n current = null;\n }\n return;\n }\n\n // If the active content is the one currently active, do nothing\n if (current && active.content === current.content) return;\n\n // Deactivate the current content and activate the new content\n deactivate(current, settings);\n activate(active, settings);\n\n // Update the currently active content\n current = active;\n };\n\n /**\n * Detect the active content on scroll\n * Debounced for performance\n */\n var scrollHandler = function (event) {\n // If there's a timer, cancel it\n if (timeout) {\n window.cancelAnimationFrame(timeout);\n }\n\n // Setup debounce callback\n timeout = window.requestAnimationFrame(publicAPIs.detect);\n };\n\n /**\n * Update content sorting on resize\n * Debounced for performance\n */\n var resizeHandler = function (event) {\n // If there's a timer, cancel it\n if (timeout) {\n window.cancelAnimationFrame(timeout);\n }\n\n // Setup debounce callback\n timeout = window.requestAnimationFrame(function () {\n sortContents(contents);\n publicAPIs.detect();\n });\n };\n\n /**\n * Destroy the current instantiation\n */\n publicAPIs.destroy = function () {\n // Undo DOM changes\n if (current) {\n deactivate(current, settings);\n }\n\n // Remove event listeners\n window.removeEventListener(\"scroll\", scrollHandler, false);\n if (settings.reflow) {\n window.removeEventListener(\"resize\", resizeHandler, false);\n }\n\n // Reset variables\n contents = null;\n navItems = null;\n current = null;\n timeout = null;\n settings = null;\n };\n\n /**\n * Initialize the current instantiation\n */\n var init = function () {\n // Merge user options into defaults\n settings = extend(defaults, options || {});\n\n // Setup variables based on the current DOM\n publicAPIs.setup();\n\n // Find the currently active content\n publicAPIs.detect();\n\n // Setup event listeners\n window.addEventListener(\"scroll\", scrollHandler, false);\n if (settings.reflow) {\n window.addEventListener(\"resize\", resizeHandler, false);\n }\n };\n\n //\n // Initialize and return the public 
APIs\n //\n\n init();\n return publicAPIs;\n };\n\n //\n // Return the Constructor\n //\n\n return Constructor;\n },\n);\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","// getDefaultExport function for compatibility with non-harmony modules\n__webpack_require__.n = (module) => {\n\tvar getter = module && module.__esModule ?\n\t\t() => (module['default']) :\n\t\t() => (module);\n\t__webpack_require__.d(getter, { a: getter });\n\treturn getter;\n};","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.g = (function() {\n\tif (typeof globalThis === 'object') return globalThis;\n\ttry {\n\t\treturn this || new Function('return this')();\n\t} catch (e) {\n\t\tif (typeof window === 'object') return window;\n\t}\n})();","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","import Gumshoe from \"./gumshoe-patched.js\";\n\n////////////////////////////////////////////////////////////////////////////////\n// Scroll Handling\n////////////////////////////////////////////////////////////////////////////////\nvar tocScroll = null;\nvar header = null;\nvar lastScrollTop = document.documentElement.scrollTop;\nconst GO_TO_TOP_OFFSET = 64;\n\nfunction scrollHandlerForHeader(positionY) {\n const headerTop = Math.floor(header.getBoundingClientRect().top);\n\n console.log(`headerTop: ${headerTop}`);\n if (headerTop == 0 && positionY != headerTop) {\n header.classList.add(\"scrolled\");\n } else {\n header.classList.remove(\"scrolled\");\n }\n}\n\nfunction scrollHandlerForBackToTop(positionY) {\n if (positionY < GO_TO_TOP_OFFSET) {\n document.documentElement.classList.remove(\"show-back-to-top\");\n } else {\n if (positionY < lastScrollTop) {\n document.documentElement.classList.add(\"show-back-to-top\");\n } else if (positionY > lastScrollTop) {\n document.documentElement.classList.remove(\"show-back-to-top\");\n }\n }\n lastScrollTop = positionY;\n}\n\nfunction scrollHandlerForTOC(positionY) {\n if (tocScroll === null) {\n return;\n }\n\n // top of page.\n if (positionY == 0) {\n tocScroll.scrollTo(0, 0);\n } else if (\n // bottom of page.\n Math.ceil(positionY) >=\n Math.floor(document.documentElement.scrollHeight - window.innerHeight)\n ) {\n tocScroll.scrollTo(0, tocScroll.scrollHeight);\n } else {\n // somewhere in the middle.\n const current = document.querySelector(\".scroll-current\");\n if (current == null) {\n return;\n }\n\n // https://github.com/pypa/pip/issues/9159 This breaks scroll behaviours.\n // // scroll the currently \"active\" heading in toc, into view.\n // const rect = current.getBoundingClientRect();\n // if (0 > 
rect.top) {\n // current.scrollIntoView(true); // the argument is \"alignTop\"\n // } else if (rect.bottom > window.innerHeight) {\n // current.scrollIntoView(false);\n // }\n }\n}\n\nfunction scrollHandler(positionY) {\n scrollHandlerForHeader(positionY);\n scrollHandlerForBackToTop(positionY);\n scrollHandlerForTOC(positionY);\n}\n\n////////////////////////////////////////////////////////////////////////////////\n// Theme Toggle\n////////////////////////////////////////////////////////////////////////////////\nfunction setTheme(mode) {\n if (mode !== \"light\" && mode !== \"dark\" && mode !== \"auto\") {\n console.error(`Got invalid theme mode: ${mode}. Resetting to auto.`);\n mode = \"auto\";\n }\n\n document.body.dataset.theme = mode;\n localStorage.setItem(\"theme\", mode);\n console.log(`Changed to ${mode} mode.`);\n}\n\nfunction cycleThemeOnce() {\n const currentTheme = localStorage.getItem(\"theme\") || \"auto\";\n const prefersDark = window.matchMedia(\"(prefers-color-scheme: dark)\").matches;\n\n if (prefersDark) {\n // Auto (dark) -> Light -> Dark\n if (currentTheme === \"auto\") {\n setTheme(\"light\");\n } else if (currentTheme == \"light\") {\n setTheme(\"dark\");\n } else {\n setTheme(\"auto\");\n }\n } else {\n // Auto (light) -> Dark -> Light\n if (currentTheme === \"auto\") {\n setTheme(\"dark\");\n } else if (currentTheme == \"dark\") {\n setTheme(\"light\");\n } else {\n setTheme(\"auto\");\n }\n }\n}\n\n////////////////////////////////////////////////////////////////////////////////\n// Setup\n////////////////////////////////////////////////////////////////////////////////\nfunction setupScrollHandler() {\n // Taken from https://developer.mozilla.org/en-US/docs/Web/API/Document/scroll_event\n let last_known_scroll_position = 0;\n let ticking = false;\n\n window.addEventListener(\"scroll\", function (e) {\n last_known_scroll_position = window.scrollY;\n\n if (!ticking) {\n window.requestAnimationFrame(function () {\n scrollHandler(last_known_scroll_position);\n ticking = false;\n });\n\n ticking = true;\n }\n });\n window.scroll();\n}\n\nfunction setupScrollSpy() {\n if (tocScroll === null) {\n return;\n }\n\n // Scrollspy -- highlight table on contents, based on scroll\n new Gumshoe(\".toc-tree a\", {\n reflow: true,\n recursive: true,\n navClass: \"scroll-current\",\n offset: () => {\n let rem = parseFloat(getComputedStyle(document.documentElement).fontSize);\n return header.getBoundingClientRect().height + 2.5 * rem + 1;\n },\n });\n}\n\nfunction setupTheme() {\n // Attach event handlers for toggling themes\n const buttons = document.getElementsByClassName(\"theme-toggle\");\n Array.from(buttons).forEach((btn) => {\n btn.addEventListener(\"click\", cycleThemeOnce);\n });\n}\n\nfunction setup() {\n setupTheme();\n setupScrollHandler();\n setupScrollSpy();\n}\n\n////////////////////////////////////////////////////////////////////////////////\n// Main entrypoint\n////////////////////////////////////////////////////////////////////////////////\nfunction main() {\n document.body.parentNode.classList.remove(\"no-js\");\n\n header = document.querySelector(\"header\");\n tocScroll = document.querySelector(\".toc-scroll\");\n\n setup();\n}\n\ndocument.addEventListener(\"DOMContentLoaded\", 
main);\n"],"names":["root","g","window","this","defaults","navClass","contentClass","nested","nestedClass","offset","reflow","events","emitEvent","type","elem","detail","settings","event","CustomEvent","bubbles","cancelable","dispatchEvent","getOffsetTop","location","offsetParent","offsetTop","sortContents","contents","sort","item1","item2","content","isInView","bottom","bounds","getBoundingClientRect","parseFloat","getOffset","parseInt","innerHeight","document","documentElement","clientHeight","top","isAtBottom","Math","ceil","pageYOffset","max","body","scrollHeight","offsetHeight","getActive","last","length","item","useLastItem","i","deactivateNested","nav","parentNode","li","closest","classList","remove","deactivate","items","link","activateNested","add","selector","options","navItems","current","timeout","publicAPIs","querySelectorAll","Array","prototype","forEach","call","getElementById","decodeURIComponent","hash","substr","push","active","activate","scrollHandler","cancelAnimationFrame","requestAnimationFrame","detect","resizeHandler","destroy","removeEventListener","merged","arguments","obj","key","hasOwnProperty","extend","setup","addEventListener","factory","__webpack_module_cache__","__webpack_require__","moduleId","cachedModule","undefined","exports","module","__webpack_modules__","n","getter","__esModule","d","a","definition","o","Object","defineProperty","enumerable","get","globalThis","Function","e","prop","tocScroll","header","lastScrollTop","scrollTop","GO_TO_TOP_OFFSET","cycleThemeOnce","currentTheme","localStorage","getItem","mode","matchMedia","matches","console","error","dataset","theme","setItem","log","buttons","getElementsByClassName","from","btn","setupTheme","last_known_scroll_position","ticking","scrollY","positionY","headerTop","floor","scrollHandlerForHeader","scrollHandlerForBackToTop","scrollTo","querySelector","scrollHandlerForTOC","scroll","setupScrollHandler","recursive","rem","getComputedStyle","fontSize","height"],"sourceRoot":""} \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 0000000..b08d58c --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,620 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + "Search finished, found ${resultCount} page(s) matching the search query." + ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename]. 
+// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename]. + const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score + boost, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? 
"#" + id : "", + null, + score, + filenames[file], + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. + normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + 
wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/skeleton.css b/_static/skeleton.css new file mode 100644 index 0000000..467c878 --- /dev/null +++ b/_static/skeleton.css @@ -0,0 +1,296 @@ +/* Some sane resets. */ +html { + height: 100%; +} + +body { + margin: 0; + min-height: 100%; +} + +/* All the flexbox magic! */ +body, +.sb-announcement, +.sb-content, +.sb-main, +.sb-container, +.sb-container__inner, +.sb-article-container, +.sb-footer-content, +.sb-header, +.sb-header-secondary, +.sb-footer { + display: flex; +} + +/* These order things vertically */ +body, +.sb-main, +.sb-article-container { + flex-direction: column; +} + +/* Put elements in the center */ +.sb-header, +.sb-header-secondary, +.sb-container, +.sb-content, +.sb-footer, +.sb-footer-content { + justify-content: center; +} +/* Put elements at the ends */ +.sb-article-container { + justify-content: space-between; +} + +/* These elements grow. */ +.sb-main, +.sb-content, +.sb-container, +article { + flex-grow: 1; +} + +/* Because padding making this wider is not fun */ +article { + box-sizing: border-box; +} + +/* The announcements element should never be wider than the page. 
*/ +.sb-announcement { + max-width: 100%; +} + +.sb-sidebar-primary, +.sb-sidebar-secondary { + flex-shrink: 0; + width: 17rem; +} + +.sb-announcement__inner { + justify-content: center; + + box-sizing: border-box; + height: 3rem; + + overflow-x: auto; + white-space: nowrap; +} + +/* Sidebars, with checkbox-based toggle */ +.sb-sidebar-primary, +.sb-sidebar-secondary { + position: fixed; + height: 100%; + top: 0; +} + +.sb-sidebar-primary { + left: -17rem; + transition: left 250ms ease-in-out; +} +.sb-sidebar-secondary { + right: -17rem; + transition: right 250ms ease-in-out; +} + +.sb-sidebar-toggle { + display: none; +} +.sb-sidebar-overlay { + position: fixed; + top: 0; + width: 0; + height: 0; + + transition: width 0ms ease 250ms, height 0ms ease 250ms, opacity 250ms ease; + + opacity: 0; + background-color: rgba(0, 0, 0, 0.54); +} + +#sb-sidebar-toggle--primary:checked + ~ .sb-sidebar-overlay[for="sb-sidebar-toggle--primary"], +#sb-sidebar-toggle--secondary:checked + ~ .sb-sidebar-overlay[for="sb-sidebar-toggle--secondary"] { + width: 100%; + height: 100%; + opacity: 1; + transition: width 0ms ease, height 0ms ease, opacity 250ms ease; +} + +#sb-sidebar-toggle--primary:checked ~ .sb-container .sb-sidebar-primary { + left: 0; +} +#sb-sidebar-toggle--secondary:checked ~ .sb-container .sb-sidebar-secondary { + right: 0; +} + +/* Full-width mode */ +.drop-secondary-sidebar-for-full-width-content + .hide-when-secondary-sidebar-shown { + display: none !important; +} +.drop-secondary-sidebar-for-full-width-content .sb-sidebar-secondary { + display: none !important; +} + +/* Mobile views */ +.sb-page-width { + width: 100%; +} + +.sb-article-container, +.sb-footer-content__inner, +.drop-secondary-sidebar-for-full-width-content .sb-article, +.drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 100vw; +} + +.sb-article, +.match-content-width { + padding: 0 1rem; + box-sizing: border-box; +} + +@media (min-width: 32rem) { + .sb-article, + .match-content-width { + padding: 0 2rem; + } +} + +/* Tablet views */ +@media (min-width: 42rem) { + .sb-article-container { + width: auto; + } + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 42rem; + } + .sb-article, + .match-content-width { + width: 42rem; + } +} +@media (min-width: 46rem) { + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 46rem; + } + .sb-article, + .match-content-width { + width: 46rem; + } +} +@media (min-width: 50rem) { + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 50rem; + } + .sb-article, + .match-content-width { + width: 50rem; + } +} + +/* Tablet views */ +@media (min-width: 59rem) { + .sb-sidebar-secondary { + position: static; + } + .hide-when-secondary-sidebar-shown { + display: none !important; + } + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 59rem; + } + .sb-article, + .match-content-width { + width: 42rem; + } +} +@media (min-width: 63rem) { + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 
63rem; + } + .sb-article, + .match-content-width { + width: 46rem; + } +} +@media (min-width: 67rem) { + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 67rem; + } + .sb-article, + .match-content-width { + width: 50rem; + } +} + +/* Desktop views */ +@media (min-width: 76rem) { + .sb-sidebar-primary { + position: static; + } + .hide-when-primary-sidebar-shown { + display: none !important; + } + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 59rem; + } + .sb-article, + .match-content-width { + width: 42rem; + } +} + +/* Full desktop views */ +@media (min-width: 80rem) { + .sb-article, + .match-content-width { + width: 46rem; + } + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 63rem; + } +} + +@media (min-width: 84rem) { + .sb-article, + .match-content-width { + width: 50rem; + } + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 67rem; + } +} + +@media (min-width: 88rem) { + .sb-footer-content__inner, + .drop-secondary-sidebar-for-full-width-content .sb-article, + .drop-secondary-sidebar-for-full-width-content .match-content-width { + width: 67rem; + } + .sb-page-width { + width: 88rem; + } +} diff --git a/_static/sphinx-design.min.css b/_static/sphinx-design.min.css new file mode 100644 index 0000000..860c36d --- /dev/null +++ b/_static/sphinx-design.min.css @@ -0,0 +1 @@ +.sd-bg-primary{background-color:var(--sd-color-primary) !important}.sd-bg-text-primary{color:var(--sd-color-primary-text) !important}button.sd-bg-primary:focus,button.sd-bg-primary:hover{background-color:var(--sd-color-primary-highlight) !important}a.sd-bg-primary:focus,a.sd-bg-primary:hover{background-color:var(--sd-color-primary-highlight) !important}.sd-bg-secondary{background-color:var(--sd-color-secondary) !important}.sd-bg-text-secondary{color:var(--sd-color-secondary-text) !important}button.sd-bg-secondary:focus,button.sd-bg-secondary:hover{background-color:var(--sd-color-secondary-highlight) !important}a.sd-bg-secondary:focus,a.sd-bg-secondary:hover{background-color:var(--sd-color-secondary-highlight) !important}.sd-bg-success{background-color:var(--sd-color-success) !important}.sd-bg-text-success{color:var(--sd-color-success-text) !important}button.sd-bg-success:focus,button.sd-bg-success:hover{background-color:var(--sd-color-success-highlight) !important}a.sd-bg-success:focus,a.sd-bg-success:hover{background-color:var(--sd-color-success-highlight) !important}.sd-bg-info{background-color:var(--sd-color-info) !important}.sd-bg-text-info{color:var(--sd-color-info-text) !important}button.sd-bg-info:focus,button.sd-bg-info:hover{background-color:var(--sd-color-info-highlight) !important}a.sd-bg-info:focus,a.sd-bg-info:hover{background-color:var(--sd-color-info-highlight) !important}.sd-bg-warning{background-color:var(--sd-color-warning) !important}.sd-bg-text-warning{color:var(--sd-color-warning-text) !important}button.sd-bg-warning:focus,button.sd-bg-warning:hover{background-color:var(--sd-color-warning-highlight) !important}a.sd-bg-warning:focus,a.sd-bg-warning:hover{background-color:var(--sd-color-warning-highlight) 
!important}.sd-bg-danger{background-color:var(--sd-color-danger) !important}.sd-bg-text-danger{color:var(--sd-color-danger-text) !important}button.sd-bg-danger:focus,button.sd-bg-danger:hover{background-color:var(--sd-color-danger-highlight) !important}a.sd-bg-danger:focus,a.sd-bg-danger:hover{background-color:var(--sd-color-danger-highlight) !important}.sd-bg-light{background-color:var(--sd-color-light) !important}.sd-bg-text-light{color:var(--sd-color-light-text) !important}button.sd-bg-light:focus,button.sd-bg-light:hover{background-color:var(--sd-color-light-highlight) !important}a.sd-bg-light:focus,a.sd-bg-light:hover{background-color:var(--sd-color-light-highlight) !important}.sd-bg-muted{background-color:var(--sd-color-muted) !important}.sd-bg-text-muted{color:var(--sd-color-muted-text) !important}button.sd-bg-muted:focus,button.sd-bg-muted:hover{background-color:var(--sd-color-muted-highlight) !important}a.sd-bg-muted:focus,a.sd-bg-muted:hover{background-color:var(--sd-color-muted-highlight) !important}.sd-bg-dark{background-color:var(--sd-color-dark) !important}.sd-bg-text-dark{color:var(--sd-color-dark-text) !important}button.sd-bg-dark:focus,button.sd-bg-dark:hover{background-color:var(--sd-color-dark-highlight) !important}a.sd-bg-dark:focus,a.sd-bg-dark:hover{background-color:var(--sd-color-dark-highlight) !important}.sd-bg-black{background-color:var(--sd-color-black) !important}.sd-bg-text-black{color:var(--sd-color-black-text) !important}button.sd-bg-black:focus,button.sd-bg-black:hover{background-color:var(--sd-color-black-highlight) !important}a.sd-bg-black:focus,a.sd-bg-black:hover{background-color:var(--sd-color-black-highlight) !important}.sd-bg-white{background-color:var(--sd-color-white) !important}.sd-bg-text-white{color:var(--sd-color-white-text) !important}button.sd-bg-white:focus,button.sd-bg-white:hover{background-color:var(--sd-color-white-highlight) !important}a.sd-bg-white:focus,a.sd-bg-white:hover{background-color:var(--sd-color-white-highlight) !important}.sd-text-primary,.sd-text-primary>p{color:var(--sd-color-primary) !important}a.sd-text-primary:focus,a.sd-text-primary:hover{color:var(--sd-color-primary-highlight) !important}.sd-text-secondary,.sd-text-secondary>p{color:var(--sd-color-secondary) !important}a.sd-text-secondary:focus,a.sd-text-secondary:hover{color:var(--sd-color-secondary-highlight) !important}.sd-text-success,.sd-text-success>p{color:var(--sd-color-success) !important}a.sd-text-success:focus,a.sd-text-success:hover{color:var(--sd-color-success-highlight) !important}.sd-text-info,.sd-text-info>p{color:var(--sd-color-info) !important}a.sd-text-info:focus,a.sd-text-info:hover{color:var(--sd-color-info-highlight) !important}.sd-text-warning,.sd-text-warning>p{color:var(--sd-color-warning) !important}a.sd-text-warning:focus,a.sd-text-warning:hover{color:var(--sd-color-warning-highlight) !important}.sd-text-danger,.sd-text-danger>p{color:var(--sd-color-danger) !important}a.sd-text-danger:focus,a.sd-text-danger:hover{color:var(--sd-color-danger-highlight) !important}.sd-text-light,.sd-text-light>p{color:var(--sd-color-light) !important}a.sd-text-light:focus,a.sd-text-light:hover{color:var(--sd-color-light-highlight) !important}.sd-text-muted,.sd-text-muted>p{color:var(--sd-color-muted) !important}a.sd-text-muted:focus,a.sd-text-muted:hover{color:var(--sd-color-muted-highlight) !important}.sd-text-dark,.sd-text-dark>p{color:var(--sd-color-dark) !important}a.sd-text-dark:focus,a.sd-text-dark:hover{color:var(--sd-color-dark-highlight) 
!important}.sd-text-black,.sd-text-black>p{color:var(--sd-color-black) !important}a.sd-text-black:focus,a.sd-text-black:hover{color:var(--sd-color-black-highlight) !important}.sd-text-white,.sd-text-white>p{color:var(--sd-color-white) !important}a.sd-text-white:focus,a.sd-text-white:hover{color:var(--sd-color-white-highlight) !important}.sd-outline-primary{border-color:var(--sd-color-primary) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-primary:focus,a.sd-outline-primary:hover{border-color:var(--sd-color-primary-highlight) !important}.sd-outline-secondary{border-color:var(--sd-color-secondary) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-secondary:focus,a.sd-outline-secondary:hover{border-color:var(--sd-color-secondary-highlight) !important}.sd-outline-success{border-color:var(--sd-color-success) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-success:focus,a.sd-outline-success:hover{border-color:var(--sd-color-success-highlight) !important}.sd-outline-info{border-color:var(--sd-color-info) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-info:focus,a.sd-outline-info:hover{border-color:var(--sd-color-info-highlight) !important}.sd-outline-warning{border-color:var(--sd-color-warning) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-warning:focus,a.sd-outline-warning:hover{border-color:var(--sd-color-warning-highlight) !important}.sd-outline-danger{border-color:var(--sd-color-danger) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-danger:focus,a.sd-outline-danger:hover{border-color:var(--sd-color-danger-highlight) !important}.sd-outline-light{border-color:var(--sd-color-light) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-light:focus,a.sd-outline-light:hover{border-color:var(--sd-color-light-highlight) !important}.sd-outline-muted{border-color:var(--sd-color-muted) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-muted:focus,a.sd-outline-muted:hover{border-color:var(--sd-color-muted-highlight) !important}.sd-outline-dark{border-color:var(--sd-color-dark) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-dark:focus,a.sd-outline-dark:hover{border-color:var(--sd-color-dark-highlight) !important}.sd-outline-black{border-color:var(--sd-color-black) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-black:focus,a.sd-outline-black:hover{border-color:var(--sd-color-black-highlight) !important}.sd-outline-white{border-color:var(--sd-color-white) !important;border-style:solid !important;border-width:1px !important}a.sd-outline-white:focus,a.sd-outline-white:hover{border-color:var(--sd-color-white-highlight) !important}.sd-bg-transparent{background-color:transparent !important}.sd-outline-transparent{border-color:transparent !important}.sd-text-transparent{color:transparent !important}.sd-p-0{padding:0 !important}.sd-pt-0,.sd-py-0{padding-top:0 !important}.sd-pr-0,.sd-px-0{padding-right:0 !important}.sd-pb-0,.sd-py-0{padding-bottom:0 !important}.sd-pl-0,.sd-px-0{padding-left:0 !important}.sd-p-1{padding:.25rem !important}.sd-pt-1,.sd-py-1{padding-top:.25rem !important}.sd-pr-1,.sd-px-1{padding-right:.25rem !important}.sd-pb-1,.sd-py-1{padding-bottom:.25rem !important}.sd-pl-1,.sd-px-1{padding-left:.25rem !important}.sd-p-2{padding:.5rem 
!important}.sd-pt-2,.sd-py-2{padding-top:.5rem !important}.sd-pr-2,.sd-px-2{padding-right:.5rem !important}.sd-pb-2,.sd-py-2{padding-bottom:.5rem !important}.sd-pl-2,.sd-px-2{padding-left:.5rem !important}.sd-p-3{padding:1rem !important}.sd-pt-3,.sd-py-3{padding-top:1rem !important}.sd-pr-3,.sd-px-3{padding-right:1rem !important}.sd-pb-3,.sd-py-3{padding-bottom:1rem !important}.sd-pl-3,.sd-px-3{padding-left:1rem !important}.sd-p-4{padding:1.5rem !important}.sd-pt-4,.sd-py-4{padding-top:1.5rem !important}.sd-pr-4,.sd-px-4{padding-right:1.5rem !important}.sd-pb-4,.sd-py-4{padding-bottom:1.5rem !important}.sd-pl-4,.sd-px-4{padding-left:1.5rem !important}.sd-p-5{padding:3rem !important}.sd-pt-5,.sd-py-5{padding-top:3rem !important}.sd-pr-5,.sd-px-5{padding-right:3rem !important}.sd-pb-5,.sd-py-5{padding-bottom:3rem !important}.sd-pl-5,.sd-px-5{padding-left:3rem !important}.sd-m-auto{margin:auto !important}.sd-mt-auto,.sd-my-auto{margin-top:auto !important}.sd-mr-auto,.sd-mx-auto{margin-right:auto !important}.sd-mb-auto,.sd-my-auto{margin-bottom:auto !important}.sd-ml-auto,.sd-mx-auto{margin-left:auto !important}.sd-m-0{margin:0 !important}.sd-mt-0,.sd-my-0{margin-top:0 !important}.sd-mr-0,.sd-mx-0{margin-right:0 !important}.sd-mb-0,.sd-my-0{margin-bottom:0 !important}.sd-ml-0,.sd-mx-0{margin-left:0 !important}.sd-m-1{margin:.25rem !important}.sd-mt-1,.sd-my-1{margin-top:.25rem !important}.sd-mr-1,.sd-mx-1{margin-right:.25rem !important}.sd-mb-1,.sd-my-1{margin-bottom:.25rem !important}.sd-ml-1,.sd-mx-1{margin-left:.25rem !important}.sd-m-2{margin:.5rem !important}.sd-mt-2,.sd-my-2{margin-top:.5rem !important}.sd-mr-2,.sd-mx-2{margin-right:.5rem !important}.sd-mb-2,.sd-my-2{margin-bottom:.5rem !important}.sd-ml-2,.sd-mx-2{margin-left:.5rem !important}.sd-m-3{margin:1rem !important}.sd-mt-3,.sd-my-3{margin-top:1rem !important}.sd-mr-3,.sd-mx-3{margin-right:1rem !important}.sd-mb-3,.sd-my-3{margin-bottom:1rem !important}.sd-ml-3,.sd-mx-3{margin-left:1rem !important}.sd-m-4{margin:1.5rem !important}.sd-mt-4,.sd-my-4{margin-top:1.5rem !important}.sd-mr-4,.sd-mx-4{margin-right:1.5rem !important}.sd-mb-4,.sd-my-4{margin-bottom:1.5rem !important}.sd-ml-4,.sd-mx-4{margin-left:1.5rem !important}.sd-m-5{margin:3rem !important}.sd-mt-5,.sd-my-5{margin-top:3rem !important}.sd-mr-5,.sd-mx-5{margin-right:3rem !important}.sd-mb-5,.sd-my-5{margin-bottom:3rem !important}.sd-ml-5,.sd-mx-5{margin-left:3rem !important}.sd-w-25{width:25% !important}.sd-w-50{width:50% !important}.sd-w-75{width:75% !important}.sd-w-100{width:100% !important}.sd-w-auto{width:auto !important}.sd-h-25{height:25% !important}.sd-h-50{height:50% !important}.sd-h-75{height:75% !important}.sd-h-100{height:100% !important}.sd-h-auto{height:auto !important}.sd-d-none{display:none !important}.sd-d-inline{display:inline !important}.sd-d-inline-block{display:inline-block !important}.sd-d-block{display:block !important}.sd-d-grid{display:grid !important}.sd-d-flex-row{display:-ms-flexbox !important;display:flex !important;flex-direction:row !important}.sd-d-flex-column{display:-ms-flexbox !important;display:flex !important;flex-direction:column !important}.sd-d-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}@media(min-width: 576px){.sd-d-sm-none{display:none !important}.sd-d-sm-inline{display:inline !important}.sd-d-sm-inline-block{display:inline-block !important}.sd-d-sm-block{display:block !important}.sd-d-sm-grid{display:grid !important}.sd-d-sm-flex{display:-ms-flexbox !important;display:flex 
!important}.sd-d-sm-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 768px){.sd-d-md-none{display:none !important}.sd-d-md-inline{display:inline !important}.sd-d-md-inline-block{display:inline-block !important}.sd-d-md-block{display:block !important}.sd-d-md-grid{display:grid !important}.sd-d-md-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-md-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 992px){.sd-d-lg-none{display:none !important}.sd-d-lg-inline{display:inline !important}.sd-d-lg-inline-block{display:inline-block !important}.sd-d-lg-block{display:block !important}.sd-d-lg-grid{display:grid !important}.sd-d-lg-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-lg-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}@media(min-width: 1200px){.sd-d-xl-none{display:none !important}.sd-d-xl-inline{display:inline !important}.sd-d-xl-inline-block{display:inline-block !important}.sd-d-xl-block{display:block !important}.sd-d-xl-grid{display:grid !important}.sd-d-xl-flex{display:-ms-flexbox !important;display:flex !important}.sd-d-xl-inline-flex{display:-ms-inline-flexbox !important;display:inline-flex !important}}.sd-align-major-start{justify-content:flex-start !important}.sd-align-major-end{justify-content:flex-end !important}.sd-align-major-center{justify-content:center !important}.sd-align-major-justify{justify-content:space-between !important}.sd-align-major-spaced{justify-content:space-evenly !important}.sd-align-minor-start{align-items:flex-start !important}.sd-align-minor-end{align-items:flex-end !important}.sd-align-minor-center{align-items:center !important}.sd-align-minor-stretch{align-items:stretch !important}.sd-text-justify{text-align:justify !important}.sd-text-left{text-align:left !important}.sd-text-right{text-align:right !important}.sd-text-center{text-align:center !important}.sd-font-weight-light{font-weight:300 !important}.sd-font-weight-lighter{font-weight:lighter !important}.sd-font-weight-normal{font-weight:400 !important}.sd-font-weight-bold{font-weight:700 !important}.sd-font-weight-bolder{font-weight:bolder !important}.sd-font-italic{font-style:italic !important}.sd-text-decoration-none{text-decoration:none !important}.sd-text-lowercase{text-transform:lowercase !important}.sd-text-uppercase{text-transform:uppercase !important}.sd-text-capitalize{text-transform:capitalize !important}.sd-text-wrap{white-space:normal !important}.sd-text-nowrap{white-space:nowrap !important}.sd-text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.sd-fs-1,.sd-fs-1>p{font-size:calc(1.375rem + 1.5vw) !important;line-height:unset !important}.sd-fs-2,.sd-fs-2>p{font-size:calc(1.325rem + 0.9vw) !important;line-height:unset !important}.sd-fs-3,.sd-fs-3>p{font-size:calc(1.3rem + 0.6vw) !important;line-height:unset !important}.sd-fs-4,.sd-fs-4>p{font-size:calc(1.275rem + 0.3vw) !important;line-height:unset !important}.sd-fs-5,.sd-fs-5>p{font-size:1.25rem !important;line-height:unset !important}.sd-fs-6,.sd-fs-6>p{font-size:1rem !important;line-height:unset !important}.sd-border-0{border:0 solid !important}.sd-border-top-0{border-top:0 solid !important}.sd-border-bottom-0{border-bottom:0 solid !important}.sd-border-right-0{border-right:0 solid !important}.sd-border-left-0{border-left:0 solid !important}.sd-border-1{border:1px solid !important}.sd-border-top-1{border-top:1px solid 
!important}.sd-border-bottom-1{border-bottom:1px solid !important}.sd-border-right-1{border-right:1px solid !important}.sd-border-left-1{border-left:1px solid !important}.sd-border-2{border:2px solid !important}.sd-border-top-2{border-top:2px solid !important}.sd-border-bottom-2{border-bottom:2px solid !important}.sd-border-right-2{border-right:2px solid !important}.sd-border-left-2{border-left:2px solid !important}.sd-border-3{border:3px solid !important}.sd-border-top-3{border-top:3px solid !important}.sd-border-bottom-3{border-bottom:3px solid !important}.sd-border-right-3{border-right:3px solid !important}.sd-border-left-3{border-left:3px solid !important}.sd-border-4{border:4px solid !important}.sd-border-top-4{border-top:4px solid !important}.sd-border-bottom-4{border-bottom:4px solid !important}.sd-border-right-4{border-right:4px solid !important}.sd-border-left-4{border-left:4px solid !important}.sd-border-5{border:5px solid !important}.sd-border-top-5{border-top:5px solid !important}.sd-border-bottom-5{border-bottom:5px solid !important}.sd-border-right-5{border-right:5px solid !important}.sd-border-left-5{border-left:5px solid !important}.sd-rounded-0{border-radius:0 !important}.sd-rounded-1{border-radius:.2rem !important}.sd-rounded-2{border-radius:.3rem !important}.sd-rounded-3{border-radius:.5rem !important}.sd-rounded-pill{border-radius:50rem !important}.sd-rounded-circle{border-radius:50% !important}.shadow-none{box-shadow:none !important}.sd-shadow-sm{box-shadow:0 .125rem .25rem var(--sd-color-shadow) !important}.sd-shadow-md{box-shadow:0 .5rem 1rem var(--sd-color-shadow) !important}.sd-shadow-lg{box-shadow:0 1rem 3rem var(--sd-color-shadow) !important}@keyframes sd-slide-from-left{0%{transform:translateX(-100%)}100%{transform:translateX(0)}}@keyframes sd-slide-from-right{0%{transform:translateX(200%)}100%{transform:translateX(0)}}@keyframes sd-grow100{0%{transform:scale(0);opacity:.5}100%{transform:scale(1);opacity:1}}@keyframes sd-grow50{0%{transform:scale(0.5);opacity:.5}100%{transform:scale(1);opacity:1}}@keyframes sd-grow50-rot20{0%{transform:scale(0.5) rotateZ(-20deg);opacity:.5}75%{transform:scale(1) rotateZ(5deg);opacity:1}95%{transform:scale(1) rotateZ(-1deg);opacity:1}100%{transform:scale(1) rotateZ(0);opacity:1}}.sd-animate-slide-from-left{animation:1s ease-out 0s 1 normal none running sd-slide-from-left}.sd-animate-slide-from-right{animation:1s ease-out 0s 1 normal none running sd-slide-from-right}.sd-animate-grow100{animation:1s ease-out 0s 1 normal none running sd-grow100}.sd-animate-grow50{animation:1s ease-out 0s 1 normal none running sd-grow50}.sd-animate-grow50-rot20{animation:1s ease-out 0s 1 normal none running sd-grow50-rot20}.sd-badge{display:inline-block;padding:.35em .65em;font-size:.75em;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.sd-badge:empty{display:none}a.sd-badge{text-decoration:none}.sd-btn .sd-badge{position:relative;top:-1px}.sd-btn{background-color:transparent;border:1px solid transparent;border-radius:.25rem;cursor:pointer;display:inline-block;font-weight:400;font-size:1rem;line-height:1.5;padding:.375rem .75rem;text-align:center;text-decoration:none;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;vertical-align:middle;user-select:none;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none}.sd-btn:hover{text-decoration:none}@media(prefers-reduced-motion: 
reduce){.sd-btn{transition:none}}.sd-btn-primary,.sd-btn-outline-primary:hover,.sd-btn-outline-primary:focus{color:var(--sd-color-primary-text) !important;background-color:var(--sd-color-primary) !important;border-color:var(--sd-color-primary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-primary:hover,.sd-btn-primary:focus{color:var(--sd-color-primary-text) !important;background-color:var(--sd-color-primary-highlight) !important;border-color:var(--sd-color-primary-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-primary{color:var(--sd-color-primary) !important;border-color:var(--sd-color-primary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-secondary,.sd-btn-outline-secondary:hover,.sd-btn-outline-secondary:focus{color:var(--sd-color-secondary-text) !important;background-color:var(--sd-color-secondary) !important;border-color:var(--sd-color-secondary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-secondary:hover,.sd-btn-secondary:focus{color:var(--sd-color-secondary-text) !important;background-color:var(--sd-color-secondary-highlight) !important;border-color:var(--sd-color-secondary-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-secondary{color:var(--sd-color-secondary) !important;border-color:var(--sd-color-secondary) !important;border-width:1px !important;border-style:solid !important}.sd-btn-success,.sd-btn-outline-success:hover,.sd-btn-outline-success:focus{color:var(--sd-color-success-text) !important;background-color:var(--sd-color-success) !important;border-color:var(--sd-color-success) !important;border-width:1px !important;border-style:solid !important}.sd-btn-success:hover,.sd-btn-success:focus{color:var(--sd-color-success-text) !important;background-color:var(--sd-color-success-highlight) !important;border-color:var(--sd-color-success-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-success{color:var(--sd-color-success) !important;border-color:var(--sd-color-success) !important;border-width:1px !important;border-style:solid !important}.sd-btn-info,.sd-btn-outline-info:hover,.sd-btn-outline-info:focus{color:var(--sd-color-info-text) !important;background-color:var(--sd-color-info) !important;border-color:var(--sd-color-info) !important;border-width:1px !important;border-style:solid !important}.sd-btn-info:hover,.sd-btn-info:focus{color:var(--sd-color-info-text) !important;background-color:var(--sd-color-info-highlight) !important;border-color:var(--sd-color-info-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-info{color:var(--sd-color-info) !important;border-color:var(--sd-color-info) !important;border-width:1px !important;border-style:solid !important}.sd-btn-warning,.sd-btn-outline-warning:hover,.sd-btn-outline-warning:focus{color:var(--sd-color-warning-text) !important;background-color:var(--sd-color-warning) !important;border-color:var(--sd-color-warning) !important;border-width:1px !important;border-style:solid !important}.sd-btn-warning:hover,.sd-btn-warning:focus{color:var(--sd-color-warning-text) !important;background-color:var(--sd-color-warning-highlight) !important;border-color:var(--sd-color-warning-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-warning{color:var(--sd-color-warning) !important;border-color:var(--sd-color-warning) 
!important;border-width:1px !important;border-style:solid !important}.sd-btn-danger,.sd-btn-outline-danger:hover,.sd-btn-outline-danger:focus{color:var(--sd-color-danger-text) !important;background-color:var(--sd-color-danger) !important;border-color:var(--sd-color-danger) !important;border-width:1px !important;border-style:solid !important}.sd-btn-danger:hover,.sd-btn-danger:focus{color:var(--sd-color-danger-text) !important;background-color:var(--sd-color-danger-highlight) !important;border-color:var(--sd-color-danger-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-danger{color:var(--sd-color-danger) !important;border-color:var(--sd-color-danger) !important;border-width:1px !important;border-style:solid !important}.sd-btn-light,.sd-btn-outline-light:hover,.sd-btn-outline-light:focus{color:var(--sd-color-light-text) !important;background-color:var(--sd-color-light) !important;border-color:var(--sd-color-light) !important;border-width:1px !important;border-style:solid !important}.sd-btn-light:hover,.sd-btn-light:focus{color:var(--sd-color-light-text) !important;background-color:var(--sd-color-light-highlight) !important;border-color:var(--sd-color-light-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-light{color:var(--sd-color-light) !important;border-color:var(--sd-color-light) !important;border-width:1px !important;border-style:solid !important}.sd-btn-muted,.sd-btn-outline-muted:hover,.sd-btn-outline-muted:focus{color:var(--sd-color-muted-text) !important;background-color:var(--sd-color-muted) !important;border-color:var(--sd-color-muted) !important;border-width:1px !important;border-style:solid !important}.sd-btn-muted:hover,.sd-btn-muted:focus{color:var(--sd-color-muted-text) !important;background-color:var(--sd-color-muted-highlight) !important;border-color:var(--sd-color-muted-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-muted{color:var(--sd-color-muted) !important;border-color:var(--sd-color-muted) !important;border-width:1px !important;border-style:solid !important}.sd-btn-dark,.sd-btn-outline-dark:hover,.sd-btn-outline-dark:focus{color:var(--sd-color-dark-text) !important;background-color:var(--sd-color-dark) !important;border-color:var(--sd-color-dark) !important;border-width:1px !important;border-style:solid !important}.sd-btn-dark:hover,.sd-btn-dark:focus{color:var(--sd-color-dark-text) !important;background-color:var(--sd-color-dark-highlight) !important;border-color:var(--sd-color-dark-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-dark{color:var(--sd-color-dark) !important;border-color:var(--sd-color-dark) !important;border-width:1px !important;border-style:solid !important}.sd-btn-black,.sd-btn-outline-black:hover,.sd-btn-outline-black:focus{color:var(--sd-color-black-text) !important;background-color:var(--sd-color-black) !important;border-color:var(--sd-color-black) !important;border-width:1px !important;border-style:solid !important}.sd-btn-black:hover,.sd-btn-black:focus{color:var(--sd-color-black-text) !important;background-color:var(--sd-color-black-highlight) !important;border-color:var(--sd-color-black-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-black{color:var(--sd-color-black) !important;border-color:var(--sd-color-black) !important;border-width:1px !important;border-style:solid 
!important}.sd-btn-white,.sd-btn-outline-white:hover,.sd-btn-outline-white:focus{color:var(--sd-color-white-text) !important;background-color:var(--sd-color-white) !important;border-color:var(--sd-color-white) !important;border-width:1px !important;border-style:solid !important}.sd-btn-white:hover,.sd-btn-white:focus{color:var(--sd-color-white-text) !important;background-color:var(--sd-color-white-highlight) !important;border-color:var(--sd-color-white-highlight) !important;border-width:1px !important;border-style:solid !important}.sd-btn-outline-white{color:var(--sd-color-white) !important;border-color:var(--sd-color-white) !important;border-width:1px !important;border-style:solid !important}.sd-stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.sd-hide-link-text{font-size:0}.sd-octicon,.sd-material-icon{display:inline-block;fill:currentColor;vertical-align:middle}.sd-avatar-xs{border-radius:50%;object-fit:cover;object-position:center;width:1rem;height:1rem}.sd-avatar-sm{border-radius:50%;object-fit:cover;object-position:center;width:3rem;height:3rem}.sd-avatar-md{border-radius:50%;object-fit:cover;object-position:center;width:5rem;height:5rem}.sd-avatar-lg{border-radius:50%;object-fit:cover;object-position:center;width:7rem;height:7rem}.sd-avatar-xl{border-radius:50%;object-fit:cover;object-position:center;width:10rem;height:10rem}.sd-avatar-inherit{border-radius:50%;object-fit:cover;object-position:center;width:inherit;height:inherit}.sd-avatar-initial{border-radius:50%;object-fit:cover;object-position:center;width:initial;height:initial}.sd-card{background-clip:border-box;background-color:var(--sd-color-card-background);border:1px solid var(--sd-color-card-border);border-radius:.25rem;color:var(--sd-color-card-text);display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;min-width:0;position:relative;word-wrap:break-word}.sd-card>hr{margin-left:0;margin-right:0}.sd-card-hover:hover{border-color:var(--sd-color-card-border-hover);transform:scale(1.01)}.sd-card-body{-ms-flex:1 1 auto;flex:1 1 auto;padding:1rem 1rem}.sd-card-title{margin-bottom:.5rem}.sd-card-subtitle{margin-top:-0.25rem;margin-bottom:0}.sd-card-text:last-child{margin-bottom:0}.sd-card-link:hover{text-decoration:none}.sd-card-link+.card-link{margin-left:1rem}.sd-card-header{padding:.5rem 1rem;margin-bottom:0;background-color:var(--sd-color-card-header);border-bottom:1px solid var(--sd-color-card-border)}.sd-card-header:first-child{border-radius:calc(0.25rem - 1px) calc(0.25rem - 1px) 0 0}.sd-card-footer{padding:.5rem 1rem;background-color:var(--sd-color-card-footer);border-top:1px solid var(--sd-color-card-border)}.sd-card-footer:last-child{border-radius:0 0 calc(0.25rem - 1px) calc(0.25rem - 1px)}.sd-card-header-tabs{margin-right:-0.5rem;margin-bottom:-0.5rem;margin-left:-0.5rem;border-bottom:0}.sd-card-header-pills{margin-right:-0.5rem;margin-left:-0.5rem}.sd-card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1rem;border-radius:calc(0.25rem - 1px)}.sd-card-img,.sd-card-img-bottom,.sd-card-img-top{width:100%}.sd-card-img,.sd-card-img-top{border-top-left-radius:calc(0.25rem - 1px);border-top-right-radius:calc(0.25rem - 1px)}.sd-card-img,.sd-card-img-bottom{border-bottom-left-radius:calc(0.25rem - 1px);border-bottom-right-radius:calc(0.25rem - 1px)}.sd-cards-carousel{width:100%;display:flex;flex-wrap:nowrap;-ms-flex-direction:row;flex-direction:row;overflow-x:hidden;scroll-snap-type:x 
mandatory}.sd-cards-carousel.sd-show-scrollbar{overflow-x:auto}.sd-cards-carousel:hover,.sd-cards-carousel:focus{overflow-x:auto}.sd-cards-carousel>.sd-card{flex-shrink:0;scroll-snap-align:start}.sd-cards-carousel>.sd-card:not(:last-child){margin-right:3px}.sd-card-cols-1>.sd-card{width:90%}.sd-card-cols-2>.sd-card{width:45%}.sd-card-cols-3>.sd-card{width:30%}.sd-card-cols-4>.sd-card{width:22.5%}.sd-card-cols-5>.sd-card{width:18%}.sd-card-cols-6>.sd-card{width:15%}.sd-card-cols-7>.sd-card{width:12.8571428571%}.sd-card-cols-8>.sd-card{width:11.25%}.sd-card-cols-9>.sd-card{width:10%}.sd-card-cols-10>.sd-card{width:9%}.sd-card-cols-11>.sd-card{width:8.1818181818%}.sd-card-cols-12>.sd-card{width:7.5%}.sd-container,.sd-container-fluid,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container-xl{margin-left:auto;margin-right:auto;padding-left:var(--sd-gutter-x, 0.75rem);padding-right:var(--sd-gutter-x, 0.75rem);width:100%}@media(min-width: 576px){.sd-container-sm,.sd-container{max-width:540px}}@media(min-width: 768px){.sd-container-md,.sd-container-sm,.sd-container{max-width:720px}}@media(min-width: 992px){.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container{max-width:960px}}@media(min-width: 1200px){.sd-container-xl,.sd-container-lg,.sd-container-md,.sd-container-sm,.sd-container{max-width:1140px}}.sd-row{--sd-gutter-x: 1.5rem;--sd-gutter-y: 0;display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-top:calc(var(--sd-gutter-y) * -1);margin-right:calc(var(--sd-gutter-x) * -0.5);margin-left:calc(var(--sd-gutter-x) * -0.5)}.sd-row>*{box-sizing:border-box;flex-shrink:0;width:100%;max-width:100%;padding-right:calc(var(--sd-gutter-x) * 0.5);padding-left:calc(var(--sd-gutter-x) * 0.5);margin-top:var(--sd-gutter-y)}.sd-col{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-auto>*{flex:0 0 auto;width:auto}.sd-row-cols-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}@media(min-width: 576px){.sd-col-sm{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-sm-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-sm-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-sm-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-sm-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-sm-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-sm-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-sm-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-sm-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-sm-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-sm-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-sm-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-sm-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-sm-12>*{flex:0 0 auto;-ms-flex:0 0 
auto;width:8.3333333333%}}@media(min-width: 768px){.sd-col-md{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-md-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-md-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-md-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-md-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-md-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-md-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-md-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-md-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-md-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-md-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-md-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-md-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-md-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}@media(min-width: 992px){.sd-col-lg{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-lg-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-lg-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-lg-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-lg-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-lg-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-lg-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-lg-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-lg-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-lg-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-lg-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-lg-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-lg-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-lg-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}@media(min-width: 1200px){.sd-col-xl{flex:1 0 0%;-ms-flex:1 0 0%}.sd-row-cols-xl-auto{flex:1 0 auto;-ms-flex:1 0 auto;width:100%}.sd-row-cols-xl-1>*{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-row-cols-xl-2>*{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-row-cols-xl-3>*{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-row-cols-xl-4>*{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-row-cols-xl-5>*{flex:0 0 auto;-ms-flex:0 0 auto;width:20%}.sd-row-cols-xl-6>*{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-row-cols-xl-7>*{flex:0 0 auto;-ms-flex:0 0 auto;width:14.2857142857%}.sd-row-cols-xl-8>*{flex:0 0 auto;-ms-flex:0 0 auto;width:12.5%}.sd-row-cols-xl-9>*{flex:0 0 auto;-ms-flex:0 0 auto;width:11.1111111111%}.sd-row-cols-xl-10>*{flex:0 0 auto;-ms-flex:0 0 auto;width:10%}.sd-row-cols-xl-11>*{flex:0 0 auto;-ms-flex:0 0 auto;width:9.0909090909%}.sd-row-cols-xl-12>*{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}}.sd-col-auto{flex:0 0 auto;-ms-flex:0 0 auto;width:auto}.sd-col-1{flex:0 0 auto;-ms-flex:0 0 auto;width:8.3333333333%}.sd-col-2{flex:0 0 auto;-ms-flex:0 0 auto;width:16.6666666667%}.sd-col-3{flex:0 0 auto;-ms-flex:0 0 auto;width:25%}.sd-col-4{flex:0 0 auto;-ms-flex:0 0 auto;width:33.3333333333%}.sd-col-5{flex:0 0 auto;-ms-flex:0 0 auto;width:41.6666666667%}.sd-col-6{flex:0 0 auto;-ms-flex:0 0 auto;width:50%}.sd-col-7{flex:0 0 auto;-ms-flex:0 0 auto;width:58.3333333333%}.sd-col-8{flex:0 0 auto;-ms-flex:0 0 auto;width:66.6666666667%}.sd-col-9{flex:0 0 auto;-ms-flex:0 0 auto;width:75%}.sd-col-10{flex:0 0 auto;-ms-flex:0 0 auto;width:83.3333333333%}.sd-col-11{flex:0 0 auto;-ms-flex:0 0 
auto;width:91.6666666667%}.sd-col-12{flex:0 0 auto;-ms-flex:0 0 auto;width:100%}.sd-g-0,.sd-gy-0{--sd-gutter-y: 0}.sd-g-0,.sd-gx-0{--sd-gutter-x: 0}.sd-g-1,.sd-gy-1{--sd-gutter-y: 0.25rem}.sd-g-1,.sd-gx-1{--sd-gutter-x: 0.25rem}.sd-g-2,.sd-gy-2{--sd-gutter-y: 0.5rem}.sd-g-2,.sd-gx-2{--sd-gutter-x: 0.5rem}.sd-g-3,.sd-gy-3{--sd-gutter-y: 1rem}.sd-g-3,.sd-gx-3{--sd-gutter-x: 1rem}.sd-g-4,.sd-gy-4{--sd-gutter-y: 1.5rem}.sd-g-4,.sd-gx-4{--sd-gutter-x: 1.5rem}.sd-g-5,.sd-gy-5{--sd-gutter-y: 3rem}.sd-g-5,.sd-gx-5{--sd-gutter-x: 3rem}@media(min-width: 576px){.sd-col-sm-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-sm-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-sm-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-sm-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-sm-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-sm-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-sm-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-sm-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-sm-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-sm-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-sm-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-sm-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-sm-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-sm-0,.sd-gy-sm-0{--sd-gutter-y: 0}.sd-g-sm-0,.sd-gx-sm-0{--sd-gutter-x: 0}.sd-g-sm-1,.sd-gy-sm-1{--sd-gutter-y: 0.25rem}.sd-g-sm-1,.sd-gx-sm-1{--sd-gutter-x: 0.25rem}.sd-g-sm-2,.sd-gy-sm-2{--sd-gutter-y: 0.5rem}.sd-g-sm-2,.sd-gx-sm-2{--sd-gutter-x: 0.5rem}.sd-g-sm-3,.sd-gy-sm-3{--sd-gutter-y: 1rem}.sd-g-sm-3,.sd-gx-sm-3{--sd-gutter-x: 1rem}.sd-g-sm-4,.sd-gy-sm-4{--sd-gutter-y: 1.5rem}.sd-g-sm-4,.sd-gx-sm-4{--sd-gutter-x: 1.5rem}.sd-g-sm-5,.sd-gy-sm-5{--sd-gutter-y: 3rem}.sd-g-sm-5,.sd-gx-sm-5{--sd-gutter-x: 3rem}}@media(min-width: 768px){.sd-col-md-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-md-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-md-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-md-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-md-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-md-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-md-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-md-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-md-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-md-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-md-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-md-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-md-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-md-0,.sd-gy-md-0{--sd-gutter-y: 0}.sd-g-md-0,.sd-gx-md-0{--sd-gutter-x: 0}.sd-g-md-1,.sd-gy-md-1{--sd-gutter-y: 0.25rem}.sd-g-md-1,.sd-gx-md-1{--sd-gutter-x: 0.25rem}.sd-g-md-2,.sd-gy-md-2{--sd-gutter-y: 0.5rem}.sd-g-md-2,.sd-gx-md-2{--sd-gutter-x: 0.5rem}.sd-g-md-3,.sd-gy-md-3{--sd-gutter-y: 1rem}.sd-g-md-3,.sd-gx-md-3{--sd-gutter-x: 1rem}.sd-g-md-4,.sd-gy-md-4{--sd-gutter-y: 1.5rem}.sd-g-md-4,.sd-gx-md-4{--sd-gutter-x: 1.5rem}.sd-g-md-5,.sd-gy-md-5{--sd-gutter-y: 3rem}.sd-g-md-5,.sd-gx-md-5{--sd-gutter-x: 3rem}}@media(min-width: 992px){.sd-col-lg-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-lg-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-lg-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-lg-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-lg-4{-ms-flex:0 0 
auto;flex:0 0 auto;width:33.3333333333%}.sd-col-lg-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-lg-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-lg-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-lg-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-lg-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-lg-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-lg-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-lg-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-lg-0,.sd-gy-lg-0{--sd-gutter-y: 0}.sd-g-lg-0,.sd-gx-lg-0{--sd-gutter-x: 0}.sd-g-lg-1,.sd-gy-lg-1{--sd-gutter-y: 0.25rem}.sd-g-lg-1,.sd-gx-lg-1{--sd-gutter-x: 0.25rem}.sd-g-lg-2,.sd-gy-lg-2{--sd-gutter-y: 0.5rem}.sd-g-lg-2,.sd-gx-lg-2{--sd-gutter-x: 0.5rem}.sd-g-lg-3,.sd-gy-lg-3{--sd-gutter-y: 1rem}.sd-g-lg-3,.sd-gx-lg-3{--sd-gutter-x: 1rem}.sd-g-lg-4,.sd-gy-lg-4{--sd-gutter-y: 1.5rem}.sd-g-lg-4,.sd-gx-lg-4{--sd-gutter-x: 1.5rem}.sd-g-lg-5,.sd-gy-lg-5{--sd-gutter-y: 3rem}.sd-g-lg-5,.sd-gx-lg-5{--sd-gutter-x: 3rem}}@media(min-width: 1200px){.sd-col-xl-auto{-ms-flex:0 0 auto;flex:0 0 auto;width:auto}.sd-col-xl-1{-ms-flex:0 0 auto;flex:0 0 auto;width:8.3333333333%}.sd-col-xl-2{-ms-flex:0 0 auto;flex:0 0 auto;width:16.6666666667%}.sd-col-xl-3{-ms-flex:0 0 auto;flex:0 0 auto;width:25%}.sd-col-xl-4{-ms-flex:0 0 auto;flex:0 0 auto;width:33.3333333333%}.sd-col-xl-5{-ms-flex:0 0 auto;flex:0 0 auto;width:41.6666666667%}.sd-col-xl-6{-ms-flex:0 0 auto;flex:0 0 auto;width:50%}.sd-col-xl-7{-ms-flex:0 0 auto;flex:0 0 auto;width:58.3333333333%}.sd-col-xl-8{-ms-flex:0 0 auto;flex:0 0 auto;width:66.6666666667%}.sd-col-xl-9{-ms-flex:0 0 auto;flex:0 0 auto;width:75%}.sd-col-xl-10{-ms-flex:0 0 auto;flex:0 0 auto;width:83.3333333333%}.sd-col-xl-11{-ms-flex:0 0 auto;flex:0 0 auto;width:91.6666666667%}.sd-col-xl-12{-ms-flex:0 0 auto;flex:0 0 auto;width:100%}.sd-g-xl-0,.sd-gy-xl-0{--sd-gutter-y: 0}.sd-g-xl-0,.sd-gx-xl-0{--sd-gutter-x: 0}.sd-g-xl-1,.sd-gy-xl-1{--sd-gutter-y: 0.25rem}.sd-g-xl-1,.sd-gx-xl-1{--sd-gutter-x: 0.25rem}.sd-g-xl-2,.sd-gy-xl-2{--sd-gutter-y: 0.5rem}.sd-g-xl-2,.sd-gx-xl-2{--sd-gutter-x: 0.5rem}.sd-g-xl-3,.sd-gy-xl-3{--sd-gutter-y: 1rem}.sd-g-xl-3,.sd-gx-xl-3{--sd-gutter-x: 1rem}.sd-g-xl-4,.sd-gy-xl-4{--sd-gutter-y: 1.5rem}.sd-g-xl-4,.sd-gx-xl-4{--sd-gutter-x: 1.5rem}.sd-g-xl-5,.sd-gy-xl-5{--sd-gutter-y: 3rem}.sd-g-xl-5,.sd-gx-xl-5{--sd-gutter-x: 3rem}}.sd-flex-row-reverse{flex-direction:row-reverse !important}details.sd-dropdown{position:relative;font-size:var(--sd-fontsize-dropdown)}details.sd-dropdown:hover{cursor:pointer}details.sd-dropdown .sd-summary-content{cursor:default}details.sd-dropdown summary.sd-summary-title{padding:.5em .6em .5em 1em;font-size:var(--sd-fontsize-dropdown-title);font-weight:var(--sd-fontweight-dropdown-title);user-select:none;-moz-user-select:none;-ms-user-select:none;-webkit-user-select:none;list-style:none;display:inline-flex;justify-content:space-between}details.sd-dropdown summary.sd-summary-title::-webkit-details-marker{display:none}details.sd-dropdown summary.sd-summary-title:focus{outline:none}details.sd-dropdown summary.sd-summary-title .sd-summary-icon{margin-right:.6em;display:inline-flex;align-items:center}details.sd-dropdown summary.sd-summary-title .sd-summary-icon svg{opacity:.8}details.sd-dropdown summary.sd-summary-title .sd-summary-text{flex-grow:1;line-height:1.5;padding-right:.5rem}details.sd-dropdown summary.sd-summary-title 
.sd-summary-state-marker{pointer-events:none;display:inline-flex;align-items:center}details.sd-dropdown summary.sd-summary-title .sd-summary-state-marker svg{opacity:.6}details.sd-dropdown summary.sd-summary-title:hover .sd-summary-state-marker svg{opacity:1;transform:scale(1.1)}details.sd-dropdown[open] summary .sd-octicon.no-title{visibility:hidden}details.sd-dropdown .sd-summary-chevron-right{transition:.25s}details.sd-dropdown[open]>.sd-summary-title .sd-summary-chevron-right{transform:rotate(90deg)}details.sd-dropdown[open]>.sd-summary-title .sd-summary-chevron-down{transform:rotate(180deg)}details.sd-dropdown:not([open]).sd-card{border:none}details.sd-dropdown:not([open])>.sd-card-header{border:1px solid var(--sd-color-card-border);border-radius:.25rem}details.sd-dropdown.sd-fade-in[open] summary~*{-moz-animation:sd-fade-in .5s ease-in-out;-webkit-animation:sd-fade-in .5s ease-in-out;animation:sd-fade-in .5s ease-in-out}details.sd-dropdown.sd-fade-in-slide-down[open] summary~*{-moz-animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out;-webkit-animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out;animation:sd-fade-in .5s ease-in-out,sd-slide-down .5s ease-in-out}.sd-col>.sd-dropdown{width:100%}.sd-summary-content>.sd-tab-set:first-child{margin-top:0}@keyframes sd-fade-in{0%{opacity:0}100%{opacity:1}}@keyframes sd-slide-down{0%{transform:translate(0, -10px)}100%{transform:translate(0, 0)}}.sd-tab-set{border-radius:.125rem;display:flex;flex-wrap:wrap;margin:1em 0;position:relative}.sd-tab-set>input{opacity:0;position:absolute}.sd-tab-set>input:checked+label{border-color:var(--sd-color-tabs-underline-active);color:var(--sd-color-tabs-label-active)}.sd-tab-set>input:checked+label+.sd-tab-content{display:block}.sd-tab-set>input:not(:checked)+label:hover{color:var(--sd-color-tabs-label-hover);border-color:var(--sd-color-tabs-underline-hover)}.sd-tab-set>input:focus+label{outline-style:auto}.sd-tab-set>input:not(.focus-visible)+label{outline:none;-webkit-tap-highlight-color:transparent}.sd-tab-set>label{border-bottom:.125rem solid transparent;margin-bottom:0;color:var(--sd-color-tabs-label-inactive);border-color:var(--sd-color-tabs-underline-inactive);cursor:pointer;font-size:var(--sd-fontsize-tabs-label);font-weight:700;padding:1em 1.25em .5em;transition:color 250ms;width:auto;z-index:1}html .sd-tab-set>label:hover{color:var(--sd-color-tabs-label-active)}.sd-col>.sd-tab-set{width:100%}.sd-tab-content{box-shadow:0 -0.0625rem var(--sd-color-tabs-overline),0 .0625rem var(--sd-color-tabs-underline);display:none;order:99;padding-bottom:.75rem;padding-top:.75rem;width:100%}.sd-tab-content>:first-child{margin-top:0 !important}.sd-tab-content>:last-child{margin-bottom:0 !important}.sd-tab-content>.sd-tab-set{margin:0}.sd-sphinx-override,.sd-sphinx-override *{-moz-box-sizing:border-box;-webkit-box-sizing:border-box;box-sizing:border-box}.sd-sphinx-override p{margin-top:0}:root{--sd-color-primary: #0071bc;--sd-color-secondary: #6c757d;--sd-color-success: #28a745;--sd-color-info: #17a2b8;--sd-color-warning: #f0b37e;--sd-color-danger: #dc3545;--sd-color-light: #f8f9fa;--sd-color-muted: #6c757d;--sd-color-dark: #212529;--sd-color-black: black;--sd-color-white: white;--sd-color-primary-highlight: #0060a0;--sd-color-secondary-highlight: #5c636a;--sd-color-success-highlight: #228e3b;--sd-color-info-highlight: #148a9c;--sd-color-warning-highlight: #cc986b;--sd-color-danger-highlight: #bb2d3b;--sd-color-light-highlight: #d3d4d5;--sd-color-muted-highlight: 
#5c636a;--sd-color-dark-highlight: #1c1f23;--sd-color-black-highlight: black;--sd-color-white-highlight: #d9d9d9;--sd-color-primary-bg: rgba(0, 113, 188, 0.2);--sd-color-secondary-bg: rgba(108, 117, 125, 0.2);--sd-color-success-bg: rgba(40, 167, 69, 0.2);--sd-color-info-bg: rgba(23, 162, 184, 0.2);--sd-color-warning-bg: rgba(240, 179, 126, 0.2);--sd-color-danger-bg: rgba(220, 53, 69, 0.2);--sd-color-light-bg: rgba(248, 249, 250, 0.2);--sd-color-muted-bg: rgba(108, 117, 125, 0.2);--sd-color-dark-bg: rgba(33, 37, 41, 0.2);--sd-color-black-bg: rgba(0, 0, 0, 0.2);--sd-color-white-bg: rgba(255, 255, 255, 0.2);--sd-color-primary-text: #fff;--sd-color-secondary-text: #fff;--sd-color-success-text: #fff;--sd-color-info-text: #fff;--sd-color-warning-text: #212529;--sd-color-danger-text: #fff;--sd-color-light-text: #212529;--sd-color-muted-text: #fff;--sd-color-dark-text: #fff;--sd-color-black-text: #fff;--sd-color-white-text: #212529;--sd-color-shadow: rgba(0, 0, 0, 0.15);--sd-color-card-border: rgba(0, 0, 0, 0.125);--sd-color-card-border-hover: hsla(231, 99%, 66%, 1);--sd-color-card-background: transparent;--sd-color-card-text: inherit;--sd-color-card-header: transparent;--sd-color-card-footer: transparent;--sd-color-tabs-label-active: hsla(231, 99%, 66%, 1);--sd-color-tabs-label-hover: hsla(231, 99%, 66%, 1);--sd-color-tabs-label-inactive: hsl(0, 0%, 66%);--sd-color-tabs-underline-active: hsla(231, 99%, 66%, 1);--sd-color-tabs-underline-hover: rgba(178, 206, 245, 0.62);--sd-color-tabs-underline-inactive: transparent;--sd-color-tabs-overline: rgb(222, 222, 222);--sd-color-tabs-underline: rgb(222, 222, 222);--sd-fontsize-tabs-label: 1rem;--sd-fontsize-dropdown: inherit;--sd-fontsize-dropdown-title: 1rem;--sd-fontweight-dropdown-title: 700} diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 0000000..8a96c69 --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. + */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. 
+ */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '<p class="highlight-link">' + + '<a href="javascript:SphinxHighlight.hideSearchWords()">' + + _("Hide Search Matches") + + "</a>" + + "</p>" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query.
+ */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/_static/styles/furo-extensions.css b/_static/styles/furo-extensions.css new file mode 100644 index 0000000..8229587 --- /dev/null +++ b/_static/styles/furo-extensions.css @@ -0,0 +1,2 @@ +#furo-sidebar-ad-placement{padding:var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal)}#furo-sidebar-ad-placement .ethical-sidebar{background:var(--color-background-secondary);border:none;box-shadow:none}#furo-sidebar-ad-placement .ethical-sidebar:hover{background:var(--color-background-hover)}#furo-sidebar-ad-placement .ethical-sidebar a{color:var(--color-foreground-primary)}#furo-sidebar-ad-placement .ethical-callout a{color:var(--color-foreground-secondary)!important}#furo-readthedocs-versions{background:transparent;display:block;position:static;width:100%}#furo-readthedocs-versions .rst-versions{background:#1a1c1e}#furo-readthedocs-versions .rst-current-version{background:var(--color-sidebar-item-background);cursor:unset}#furo-readthedocs-versions .rst-current-version:hover{background:var(--color-sidebar-item-background)}#furo-readthedocs-versions .rst-current-version .fa-book{color:var(--color-foreground-primary)}#furo-readthedocs-versions>.rst-other-versions{padding:0}#furo-readthedocs-versions>.rst-other-versions small{opacity:1}#furo-readthedocs-versions .injected .rst-versions{position:unset}#furo-readthedocs-versions:focus-within,#furo-readthedocs-versions:hover{box-shadow:0 0 0 1px var(--color-sidebar-background-border)}#furo-readthedocs-versions:focus-within .rst-current-version,#furo-readthedocs-versions:hover .rst-current-version{background:#1a1c1e;font-size:inherit;height:auto;line-height:inherit;padding:12px;text-align:right}#furo-readthedocs-versions:focus-within .rst-current-version .fa-book,#furo-readthedocs-versions:hover .rst-current-version .fa-book{color:#fff;float:left}#furo-readthedocs-versions:focus-within .fa-caret-down,#furo-readthedocs-versions:hover .fa-caret-down{display:none}#furo-readthedocs-versions:focus-within .injected,#furo-readthedocs-versions:focus-within .rst-current-version,#furo-readthedocs-versions:focus-within .rst-other-versions,#furo-readthedocs-versions:hover .injected,#furo-readthedocs-versions:hover .rst-current-version,#furo-readthedocs-versions:hover .rst-other-versions{display:block}#furo-readthedocs-versions:focus-within>.rst-current-version,#furo-readthedocs-versions:hover>.rst-current-version{display:none}.highlight:hover button.copybtn{color:var(--color-code-foreground)}.highlight button.copybtn{align-items:center;background-color:var(--color-code-background);border:none;color:var(--color-background-item);cursor:pointer;height:1.25em;right:.5rem;top:.625rem;transition:color .3s,opacity .3s;width:1.25em}.highlight button.copybtn:hover{background-color:var(--color-code-background);color:var(--color-brand-content)}.highlight button.copybtn:after{background-color:transparent;color:var(--color-code-foreground);display:none}.highlight button.copybtn.success{color:#22863a;transition:color 0ms}.highlight button.copybtn.success:after{display:block}.highlight button.copybtn 
svg{padding:0}body{--sd-color-primary:var(--color-brand-primary);--sd-color-primary-highlight:var(--color-brand-content);--sd-color-primary-text:var(--color-background-primary);--sd-color-shadow:rgba(0,0,0,.05);--sd-color-card-border:var(--color-card-border);--sd-color-card-border-hover:var(--color-brand-content);--sd-color-card-background:var(--color-card-background);--sd-color-card-text:var(--color-foreground-primary);--sd-color-card-header:var(--color-card-marginals-background);--sd-color-card-footer:var(--color-card-marginals-background);--sd-color-tabs-label-active:var(--color-brand-content);--sd-color-tabs-label-hover:var(--color-foreground-muted);--sd-color-tabs-label-inactive:var(--color-foreground-muted);--sd-color-tabs-underline-active:var(--color-brand-content);--sd-color-tabs-underline-hover:var(--color-foreground-border);--sd-color-tabs-underline-inactive:var(--color-background-border);--sd-color-tabs-overline:var(--color-background-border);--sd-color-tabs-underline:var(--color-background-border)}.sd-tab-content{box-shadow:0 -2px var(--sd-color-tabs-overline),0 1px var(--sd-color-tabs-underline)}.sd-card{box-shadow:0 .1rem .25rem var(--sd-color-shadow),0 0 .0625rem rgba(0,0,0,.1)}.sd-shadow-sm{box-shadow:0 .1rem .25rem var(--sd-color-shadow),0 0 .0625rem rgba(0,0,0,.1)!important}.sd-shadow-md{box-shadow:0 .3rem .75rem var(--sd-color-shadow),0 0 .0625rem rgba(0,0,0,.1)!important}.sd-shadow-lg{box-shadow:0 .6rem 1.5rem var(--sd-color-shadow),0 0 .0625rem rgba(0,0,0,.1)!important}.sd-card-hover:hover{transform:none}.sd-cards-carousel{gap:.25rem;padding:.25rem}body{--tabs--label-text:var(--color-foreground-muted);--tabs--label-text--hover:var(--color-foreground-muted);--tabs--label-text--active:var(--color-brand-content);--tabs--label-text--active--hover:var(--color-brand-content);--tabs--label-background:transparent;--tabs--label-background--hover:transparent;--tabs--label-background--active:transparent;--tabs--label-background--active--hover:transparent;--tabs--padding-x:0.25em;--tabs--margin-x:1em;--tabs--border:var(--color-background-border);--tabs--label-border:transparent;--tabs--label-border--hover:var(--color-foreground-muted);--tabs--label-border--active:var(--color-brand-content);--tabs--label-border--active--hover:var(--color-brand-content)}[role=main] .container{max-width:none;padding-left:0;padding-right:0}.shadow.docutils{border:none;box-shadow:0 .2rem .5rem rgba(0,0,0,.05),0 0 .0625rem rgba(0,0,0,.1)!important}.sphinx-bs .card{background-color:var(--color-background-secondary);color:var(--color-foreground)} +/*# sourceMappingURL=furo-extensions.css.map*/ \ No newline at end of file diff --git a/_static/styles/furo-extensions.css.map b/_static/styles/furo-extensions.css.map new file mode 100644 index 0000000..c26eac7 --- /dev/null +++ b/_static/styles/furo-extensions.css.map @@ -0,0 +1 @@ 
+{"version":3,"file":"styles/furo-extensions.css","mappings":"AAGA,2BACE,oFACA,4CAKE,6CAHA,YACA,eAEA,CACA,kDACE,yCAEF,8CACE,sCAEJ,8CACE,kDAEJ,2BAGE,uBACA,cAHA,gBACA,UAEA,CAGA,yCACE,mBAEF,gDAEE,gDADA,YACA,CACA,sDACE,gDACF,yDACE,sCAEJ,+CACE,UACA,qDACE,UAGF,mDACE,eAEJ,yEAEE,4DAEA,mHASE,mBAPA,kBAEA,YADA,oBAGA,aADA,gBAIA,CAEA,qIAEE,WADA,UACA,CAEJ,uGACE,aAEF,iUAGE,cAEF,mHACE,aC1EJ,gCACE,mCAEF,0BAEE,mBAUA,8CACA,YAFA,mCAKA,eAZA,cAIA,YADA,YAYA,iCAdA,YAcA,CAEA,gCAEE,8CADA,gCACA,CAEF,gCAGE,6BADA,mCADA,YAEA,CAEF,kCAEE,cADA,oBACA,CACA,wCACE,cAEJ,8BACE,UCzCN,KAEE,6CAA8C,CAC9C,uDAAwD,CACxD,uDAAwD,CAGxD,iCAAsC,CAGtC,+CAAgD,CAChD,uDAAwD,CACxD,uDAAwD,CACxD,oDAAqD,CACrD,6DAA8D,CAC9D,6DAA8D,CAG9D,uDAAwD,CACxD,yDAA0D,CAC1D,4DAA6D,CAC7D,2DAA4D,CAC5D,8DAA+D,CAC/D,iEAAkE,CAClE,uDAAwD,CACxD,wDAAyD,CAG3D,gBACE,qFAGF,SACE,6EAEF,cACE,uFAEF,cACE,uFAEF,cACE,uFAGF,qBACE,eAEF,mBACE,WACA,eChDF,KACE,gDAAiD,CACjD,uDAAwD,CACxD,qDAAsD,CACtD,4DAA6D,CAC7D,oCAAqC,CACrC,2CAA4C,CAC5C,4CAA6C,CAC7C,mDAAoD,CACpD,wBAAyB,CACzB,oBAAqB,CACrB,6CAA8C,CAC9C,gCAAiC,CACjC,yDAA0D,CAC1D,uDAAwD,CACxD,8DAA+D,CCbjE,uBACE,eACA,eACA,gBAGF,iBACE,YACA,+EAGF,iBACE,mDACA","sources":["webpack:///./src/furo/assets/styles/extensions/_readthedocs.sass","webpack:///./src/furo/assets/styles/extensions/_copybutton.sass","webpack:///./src/furo/assets/styles/extensions/_sphinx-design.sass","webpack:///./src/furo/assets/styles/extensions/_sphinx-inline-tabs.sass","webpack:///./src/furo/assets/styles/extensions/_sphinx-panels.sass"],"sourcesContent":["// This file contains the styles used for tweaking how ReadTheDoc's embedded\n// contents would show up inside the theme.\n\n#furo-sidebar-ad-placement\n padding: var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal)\n .ethical-sidebar\n // Remove the border and box-shadow.\n border: none\n box-shadow: none\n // Manage the background colors.\n background: var(--color-background-secondary)\n &:hover\n background: var(--color-background-hover)\n // Ensure the text is legible.\n a\n color: var(--color-foreground-primary)\n\n .ethical-callout a\n color: var(--color-foreground-secondary) !important\n\n#furo-readthedocs-versions\n position: static\n width: 100%\n background: transparent\n display: block\n\n // Make the background color fit with the theme's aesthetic.\n .rst-versions\n background: rgb(26, 28, 30)\n\n .rst-current-version\n cursor: unset\n background: var(--color-sidebar-item-background)\n &:hover\n background: var(--color-sidebar-item-background)\n .fa-book\n color: var(--color-foreground-primary)\n\n > .rst-other-versions\n padding: 0\n small\n opacity: 1\n\n .injected\n .rst-versions\n position: unset\n\n &:hover,\n &:focus-within\n box-shadow: 0 0 0 1px var(--color-sidebar-background-border)\n\n .rst-current-version\n // Undo the tweaks done in RTD's CSS\n font-size: inherit\n line-height: inherit\n height: auto\n text-align: right\n padding: 12px\n\n // Match the rest of the body\n background: #1a1c1e\n\n .fa-book\n float: left\n color: white\n\n .fa-caret-down\n display: none\n\n .rst-current-version,\n .rst-other-versions,\n .injected\n display: block\n\n > .rst-current-version\n display: none\n",".highlight\n &:hover button.copybtn\n color: var(--color-code-foreground)\n\n button.copybtn\n // Align things correctly\n align-items: center\n\n height: 1.25em\n width: 1.25em\n\n top: 0.625rem // $code-spacing-vertical\n right: 0.5rem\n\n // Make it look better\n color: var(--color-background-item)\n background-color: var(--color-code-background)\n border: none\n\n // Change to 
cursor to make it obvious that you can click on it\n cursor: pointer\n\n // Transition smoothly, for aesthetics\n transition: color 300ms, opacity 300ms\n\n &:hover\n color: var(--color-brand-content)\n background-color: var(--color-code-background)\n\n &::after\n display: none\n color: var(--color-code-foreground)\n background-color: transparent\n\n &.success\n transition: color 0ms\n color: #22863a\n &::after\n display: block\n\n svg\n padding: 0\n","body\n // Colors\n --sd-color-primary: var(--color-brand-primary)\n --sd-color-primary-highlight: var(--color-brand-content)\n --sd-color-primary-text: var(--color-background-primary)\n\n // Shadows\n --sd-color-shadow: rgba(0, 0, 0, 0.05)\n\n // Cards\n --sd-color-card-border: var(--color-card-border)\n --sd-color-card-border-hover: var(--color-brand-content)\n --sd-color-card-background: var(--color-card-background)\n --sd-color-card-text: var(--color-foreground-primary)\n --sd-color-card-header: var(--color-card-marginals-background)\n --sd-color-card-footer: var(--color-card-marginals-background)\n\n // Tabs\n --sd-color-tabs-label-active: var(--color-brand-content)\n --sd-color-tabs-label-hover: var(--color-foreground-muted)\n --sd-color-tabs-label-inactive: var(--color-foreground-muted)\n --sd-color-tabs-underline-active: var(--color-brand-content)\n --sd-color-tabs-underline-hover: var(--color-foreground-border)\n --sd-color-tabs-underline-inactive: var(--color-background-border)\n --sd-color-tabs-overline: var(--color-background-border)\n --sd-color-tabs-underline: var(--color-background-border)\n\n// Tabs\n.sd-tab-content\n box-shadow: 0 -2px var(--sd-color-tabs-overline), 0 1px var(--sd-color-tabs-underline)\n\n// Shadows\n.sd-card // Have a shadow by default\n box-shadow: 0 0.1rem 0.25rem var(--sd-color-shadow), 0 0 0.0625rem rgba(0, 0, 0, 0.1)\n\n.sd-shadow-sm\n box-shadow: 0 0.1rem 0.25rem var(--sd-color-shadow), 0 0 0.0625rem rgba(0, 0, 0, 0.1) !important\n\n.sd-shadow-md\n box-shadow: 0 0.3rem 0.75rem var(--sd-color-shadow), 0 0 0.0625rem rgba(0, 0, 0, 0.1) !important\n\n.sd-shadow-lg\n box-shadow: 0 0.6rem 1.5rem var(--sd-color-shadow), 0 0 0.0625rem rgba(0, 0, 0, 0.1) !important\n\n// Cards\n.sd-card-hover:hover // Don't change scale on hover\n transform: none\n\n.sd-cards-carousel // Have a bit of gap in the carousel by default\n gap: 0.25rem\n padding: 0.25rem\n","// This file contains styles to tweak sphinx-inline-tabs to work well with Furo.\n\nbody\n --tabs--label-text: var(--color-foreground-muted)\n --tabs--label-text--hover: var(--color-foreground-muted)\n --tabs--label-text--active: var(--color-brand-content)\n --tabs--label-text--active--hover: var(--color-brand-content)\n --tabs--label-background: transparent\n --tabs--label-background--hover: transparent\n --tabs--label-background--active: transparent\n --tabs--label-background--active--hover: transparent\n --tabs--padding-x: 0.25em\n --tabs--margin-x: 1em\n --tabs--border: var(--color-background-border)\n --tabs--label-border: transparent\n --tabs--label-border--hover: var(--color-foreground-muted)\n --tabs--label-border--active: var(--color-brand-content)\n --tabs--label-border--active--hover: var(--color-brand-content)\n","// This file contains styles to tweak sphinx-panels to work well with Furo.\n\n// sphinx-panels includes Bootstrap 4, which uses .container which can conflict\n// with docutils' `.. 
container::` directive.\n[role=\"main\"] .container\n max-width: initial\n padding-left: initial\n padding-right: initial\n\n// Make the panels look nicer!\n.shadow.docutils\n border: none\n box-shadow: 0 0.2rem 0.5rem rgba(0, 0, 0, 0.05), 0 0 0.0625rem rgba(0, 0, 0, 0.1) !important\n\n// Make panel colors respond to dark mode\n.sphinx-bs .card\n background-color: var(--color-background-secondary)\n color: var(--color-foreground)\n"],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/_static/styles/furo.css b/_static/styles/furo.css new file mode 100644 index 0000000..05a56b1 --- /dev/null +++ b/_static/styles/furo.css @@ -0,0 +1,2 @@ +/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button}[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type=button]:-moz-focusring,[type=reset]:-moz-focusring,[type=submit]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:.35em .75em .625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type=checkbox],[type=radio]{box-sizing:border-box;padding:0}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}[hidden],template{display:none}@media print{.content-icon-container,.headerlink,.mobile-header,.related-pages{display:none!important}.highlight{border:.1pt solid var(--color-foreground-border)}a,blockquote,dl,ol,p,pre,table,ul{page-break-inside:avoid}caption,figure,h1,h2,h3,h4,h5,h6,img{page-break-after:avoid;page-break-inside:avoid}dl,ol,ul{page-break-before:avoid}}.visually-hidden{height:1px!important;margin:-1px!important;overflow:hidden!important;padding:0!important;position:absolute!important;width:1px!important;clip:rect(0,0,0,0)!important;background:var(--color-background-primary);border:0!important;color:var(--color-foreground-primary);white-space:nowrap!important}:-moz-focusring{outline:auto}body{--font-stack:-apple-system,BlinkMacSystemFont,Segoe UI,Helvetica,Arial,sans-serif,Apple Color Emoji,Segoe UI Emoji;--font-stack--monospace:"SFMono-Regular",Menlo,Consolas,Monaco,Liberation Mono,Lucida 
Console,monospace;--font-stack--headings:var(--font-stack);--font-size--normal:100%;--font-size--small:87.5%;--font-size--small--2:81.25%;--font-size--small--3:75%;--font-size--small--4:62.5%;--sidebar-caption-font-size:var(--font-size--small--2);--sidebar-item-font-size:var(--font-size--small);--sidebar-search-input-font-size:var(--font-size--small);--toc-font-size:var(--font-size--small--3);--toc-font-size--mobile:var(--font-size--normal);--toc-title-font-size:var(--font-size--small--4);--admonition-font-size:0.8125rem;--admonition-title-font-size:0.8125rem;--code-font-size:var(--font-size--small--2);--api-font-size:var(--font-size--small);--header-height:calc(var(--sidebar-item-line-height) + var(--sidebar-item-spacing-vertical)*4);--header-padding:0.5rem;--sidebar-tree-space-above:1.5rem;--sidebar-caption-space-above:1rem;--sidebar-item-line-height:1rem;--sidebar-item-spacing-vertical:0.5rem;--sidebar-item-spacing-horizontal:1rem;--sidebar-item-height:calc(var(--sidebar-item-line-height) + var(--sidebar-item-spacing-vertical)*2);--sidebar-expander-width:var(--sidebar-item-height);--sidebar-search-space-above:0.5rem;--sidebar-search-input-spacing-vertical:0.5rem;--sidebar-search-input-spacing-horizontal:0.5rem;--sidebar-search-input-height:1rem;--sidebar-search-icon-size:var(--sidebar-search-input-height);--toc-title-padding:0.25rem 0;--toc-spacing-vertical:1.5rem;--toc-spacing-horizontal:1.5rem;--toc-item-spacing-vertical:0.4rem;--toc-item-spacing-horizontal:1rem;--icon-search:url('data:image/svg+xml;charset=utf-8,');--icon-pencil:url('data:image/svg+xml;charset=utf-8,');--icon-abstract:url('data:image/svg+xml;charset=utf-8,');--icon-info:url('data:image/svg+xml;charset=utf-8,');--icon-flame:url('data:image/svg+xml;charset=utf-8,');--icon-question:url('data:image/svg+xml;charset=utf-8,');--icon-warning:url('data:image/svg+xml;charset=utf-8,');--icon-failure:url('data:image/svg+xml;charset=utf-8,');--icon-spark:url('data:image/svg+xml;charset=utf-8,');--color-admonition-title--caution:#ff9100;--color-admonition-title-background--caution:rgba(255,145,0,.2);--color-admonition-title--warning:#ff9100;--color-admonition-title-background--warning:rgba(255,145,0,.2);--color-admonition-title--danger:#ff5252;--color-admonition-title-background--danger:rgba(255,82,82,.2);--color-admonition-title--attention:#ff5252;--color-admonition-title-background--attention:rgba(255,82,82,.2);--color-admonition-title--error:#ff5252;--color-admonition-title-background--error:rgba(255,82,82,.2);--color-admonition-title--hint:#00c852;--color-admonition-title-background--hint:rgba(0,200,82,.2);--color-admonition-title--tip:#00c852;--color-admonition-title-background--tip:rgba(0,200,82,.2);--color-admonition-title--important:#00bfa5;--color-admonition-title-background--important:rgba(0,191,165,.2);--color-admonition-title--note:#00b0ff;--color-admonition-title-background--note:rgba(0,176,255,.2);--color-admonition-title--seealso:#448aff;--color-admonition-title-background--seealso:rgba(68,138,255,.2);--color-admonition-title--admonition-todo:grey;--color-admonition-title-background--admonition-todo:hsla(0,0%,50%,.2);--color-admonition-title:#651fff;--color-admonition-title-background:rgba(101,31,255,.2);--icon-admonition-default:var(--icon-abstract);--color-topic-title:#14b8a6;--color-topic-title-background:rgba(20,184,166,.2);--icon-topic-default:var(--icon-pencil);--color-problematic:#b30000;--color-foreground-primary:#000;--color-foreground-secondary:#5a5c63;--color-foreground-muted:#6b6f76;--color-foreground-bor
der:#878787;--color-background-primary:#fff;--color-background-secondary:#f8f9fb;--color-background-hover:#efeff4;--color-background-hover--transparent:#efeff400;--color-background-border:#eeebee;--color-background-item:#ccc;--color-announcement-background:#000000dd;--color-announcement-text:#eeebee;--color-brand-primary:#0a4bff;--color-brand-content:#2757dd;--color-brand-visited:#872ee0;--color-api-background:var(--color-background-hover--transparent);--color-api-background-hover:var(--color-background-hover);--color-api-overall:var(--color-foreground-secondary);--color-api-name:var(--color-problematic);--color-api-pre-name:var(--color-problematic);--color-api-paren:var(--color-foreground-secondary);--color-api-keyword:var(--color-foreground-primary);--color-api-added:#21632c;--color-api-added-border:#38a84d;--color-api-changed:#046172;--color-api-changed-border:#06a1bc;--color-api-deprecated:#605706;--color-api-deprecated-border:#f0d90f;--color-api-removed:#b30000;--color-api-removed-border:#ff5c5c;--color-highlight-on-target:#ffc;--color-inline-code-background:var(--color-background-secondary);--color-highlighted-background:#def;--color-highlighted-text:var(--color-foreground-primary);--color-guilabel-background:#ddeeff80;--color-guilabel-border:#bedaf580;--color-guilabel-text:var(--color-foreground-primary);--color-admonition-background:transparent;--color-table-header-background:var(--color-background-secondary);--color-table-border:var(--color-background-border);--color-card-border:var(--color-background-secondary);--color-card-background:transparent;--color-card-marginals-background:var(--color-background-secondary);--color-header-background:var(--color-background-primary);--color-header-border:var(--color-background-border);--color-header-text:var(--color-foreground-primary);--color-sidebar-background:var(--color-background-secondary);--color-sidebar-background-border:var(--color-background-border);--color-sidebar-brand-text:var(--color-foreground-primary);--color-sidebar-caption-text:var(--color-foreground-muted);--color-sidebar-link-text:var(--color-foreground-secondary);--color-sidebar-link-text--top-level:var(--color-brand-primary);--color-sidebar-item-background:var(--color-sidebar-background);--color-sidebar-item-background--current:var( --color-sidebar-item-background );--color-sidebar-item-background--hover:linear-gradient(90deg,var(--color-background-hover--transparent) 0%,var(--color-background-hover) var(--sidebar-item-spacing-horizontal),var(--color-background-hover) 100%);--color-sidebar-item-expander-background:transparent;--color-sidebar-item-expander-background--hover:var( --color-background-hover 
);--color-sidebar-search-text:var(--color-foreground-primary);--color-sidebar-search-background:var(--color-background-secondary);--color-sidebar-search-background--focus:var(--color-background-primary);--color-sidebar-search-border:var(--color-background-border);--color-sidebar-search-icon:var(--color-foreground-muted);--color-toc-background:var(--color-background-primary);--color-toc-title-text:var(--color-foreground-muted);--color-toc-item-text:var(--color-foreground-secondary);--color-toc-item-text--hover:var(--color-foreground-primary);--color-toc-item-text--active:var(--color-brand-primary);--color-content-foreground:var(--color-foreground-primary);--color-content-background:transparent;--color-link:var(--color-brand-content);--color-link-underline:var(--color-background-border);--color-link--hover:var(--color-brand-content);--color-link-underline--hover:var(--color-foreground-border);--color-link--visited:var(--color-brand-visited);--color-link-underline--visited:var(--color-background-border);--color-link--visited--hover:var(--color-brand-visited);--color-link-underline--visited--hover:var(--color-foreground-border)}.only-light{display:block!important}html body .only-dark{display:none!important}@media not print{body[data-theme=dark]{--color-problematic:#ee5151;--color-foreground-primary:#cfd0d0;--color-foreground-secondary:#9ca0a5;--color-foreground-muted:#81868d;--color-foreground-border:#666;--color-background-primary:#131416;--color-background-secondary:#1a1c1e;--color-background-hover:#1e2124;--color-background-hover--transparent:#1e212400;--color-background-border:#303335;--color-background-item:#444;--color-announcement-background:#000000dd;--color-announcement-text:#eeebee;--color-brand-primary:#3d94ff;--color-brand-content:#5ca5ff;--color-brand-visited:#b27aeb;--color-highlighted-background:#083563;--color-guilabel-background:#08356380;--color-guilabel-border:#13395f80;--color-api-keyword:var(--color-foreground-secondary);--color-highlight-on-target:#330;--color-api-added:#3db854;--color-api-added-border:#267334;--color-api-changed:#09b0ce;--color-api-changed-border:#056d80;--color-api-deprecated:#b1a10b;--color-api-deprecated-border:#6e6407;--color-api-removed:#ff7575;--color-api-removed-border:#b03b3b;--color-admonition-background:#18181a;--color-card-border:var(--color-background-secondary);--color-card-background:#18181a;--color-card-marginals-background:var(--color-background-hover)}html body[data-theme=dark] .only-light{display:none!important}body[data-theme=dark] 
.only-dark{display:block!important}@media(prefers-color-scheme:dark){body:not([data-theme=light]){--color-problematic:#ee5151;--color-foreground-primary:#cfd0d0;--color-foreground-secondary:#9ca0a5;--color-foreground-muted:#81868d;--color-foreground-border:#666;--color-background-primary:#131416;--color-background-secondary:#1a1c1e;--color-background-hover:#1e2124;--color-background-hover--transparent:#1e212400;--color-background-border:#303335;--color-background-item:#444;--color-announcement-background:#000000dd;--color-announcement-text:#eeebee;--color-brand-primary:#3d94ff;--color-brand-content:#5ca5ff;--color-brand-visited:#b27aeb;--color-highlighted-background:#083563;--color-guilabel-background:#08356380;--color-guilabel-border:#13395f80;--color-api-keyword:var(--color-foreground-secondary);--color-highlight-on-target:#330;--color-api-added:#3db854;--color-api-added-border:#267334;--color-api-changed:#09b0ce;--color-api-changed-border:#056d80;--color-api-deprecated:#b1a10b;--color-api-deprecated-border:#6e6407;--color-api-removed:#ff7575;--color-api-removed-border:#b03b3b;--color-admonition-background:#18181a;--color-card-border:var(--color-background-secondary);--color-card-background:#18181a;--color-card-marginals-background:var(--color-background-hover)}html body:not([data-theme=light]) .only-light{display:none!important}body:not([data-theme=light]) .only-dark{display:block!important}}}body[data-theme=auto] .theme-toggle svg.theme-icon-when-auto-light{display:block}@media(prefers-color-scheme:dark){body[data-theme=auto] .theme-toggle svg.theme-icon-when-auto-dark{display:block}body[data-theme=auto] .theme-toggle svg.theme-icon-when-auto-light{display:none}}body[data-theme=dark] .theme-toggle svg.theme-icon-when-dark,body[data-theme=light] .theme-toggle svg.theme-icon-when-light{display:block}body{font-family:var(--font-stack)}code,kbd,pre,samp{font-family:var(--font-stack--monospace)}body{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}article{line-height:1.5}h1,h2,h3,h4,h5,h6{border-radius:.5rem;font-family:var(--font-stack--headings);font-weight:700;line-height:1.25;margin:.5rem -.5rem;padding-left:.5rem;padding-right:.5rem}h1+p,h2+p,h3+p,h4+p,h5+p,h6+p{margin-top:0}h1{font-size:2.5em;margin-bottom:1rem}h1,h2{margin-top:1.75rem}h2{font-size:2em}h3{font-size:1.5em}h4{font-size:1.25em}h5{font-size:1.125em}h6{font-size:1em}small{font-size:80%;opacity:75%}p{margin-bottom:.75rem;margin-top:.5rem}hr.docutils{background-color:var(--color-background-border);border:0;height:1px;margin:2rem 0;padding:0}.centered{text-align:center}a{color:var(--color-link);text-decoration:underline;text-decoration-color:var(--color-link-underline)}a:visited{color:var(--color-link--visited);text-decoration-color:var(--color-link-underline--visited)}a:visited:hover{color:var(--color-link--visited--hover);text-decoration-color:var(--color-link-underline--visited--hover)}a:hover{color:var(--color-link--hover);text-decoration-color:var(--color-link-underline--hover)}a.muted-link{color:inherit}a.muted-link:hover{color:var(--color-link--hover);text-decoration-color:var(--color-link-underline--hover)}a.muted-link:hover:visited{color:var(--color-link--visited--hover);text-decoration-color:var(--color-link-underline--visited--hover)}html{overflow-x:hidden;overflow-y:scroll;scroll-behavior:smooth}.sidebar-scroll,.toc-scroll,article[role=main] *{scrollbar-color:var(--color-foreground-border) 
transparent;scrollbar-width:thin}.sidebar-scroll::-webkit-scrollbar,.toc-scroll::-webkit-scrollbar,article[role=main] ::-webkit-scrollbar{height:.25rem;width:.25rem}.sidebar-scroll::-webkit-scrollbar-thumb,.toc-scroll::-webkit-scrollbar-thumb,article[role=main] ::-webkit-scrollbar-thumb{background-color:var(--color-foreground-border);border-radius:.125rem}body,html{height:100%}.skip-to-content,body,html{background:var(--color-background-primary);color:var(--color-foreground-primary)}.skip-to-content{border-radius:1rem;left:.25rem;padding:1rem;position:fixed;top:.25rem;transform:translateY(-200%);transition:transform .3s ease-in-out;z-index:40}.skip-to-content:focus-within{transform:translateY(0)}article{background:var(--color-content-background);color:var(--color-content-foreground);overflow-wrap:break-word}.page{display:flex;min-height:100%}.mobile-header{background-color:var(--color-header-background);border-bottom:1px solid var(--color-header-border);color:var(--color-header-text);display:none;height:var(--header-height);width:100%;z-index:10}.mobile-header.scrolled{border-bottom:none;box-shadow:0 0 .2rem rgba(0,0,0,.1),0 .2rem .4rem rgba(0,0,0,.2)}.mobile-header .header-center a{color:var(--color-header-text);text-decoration:none}.main{display:flex;flex:1}.sidebar-drawer{background:var(--color-sidebar-background);border-right:1px solid var(--color-sidebar-background-border);box-sizing:border-box;display:flex;justify-content:flex-end;min-width:15em;width:calc(50% - 26em)}.sidebar-container,.toc-drawer{box-sizing:border-box;width:15em}.toc-drawer{background:var(--color-toc-background);padding-right:1rem}.sidebar-sticky,.toc-sticky{display:flex;flex-direction:column;height:min(100%,100vh);height:100vh;position:sticky;top:0}.sidebar-scroll,.toc-scroll{flex-grow:1;flex-shrink:1;overflow:auto;scroll-behavior:smooth}.content{display:flex;flex-direction:column;justify-content:space-between;padding:0 3em;width:46em}.icon{display:inline-block;height:1rem;width:1rem}.icon svg{height:100%;width:100%}.announcement{align-items:center;background-color:var(--color-announcement-background);color:var(--color-announcement-text);display:flex;height:var(--header-height);overflow-x:auto}.announcement+.page{min-height:calc(100% - var(--header-height))}.announcement-content{box-sizing:border-box;min-width:100%;padding:.5rem;text-align:center;white-space:nowrap}.announcement-content a{color:var(--color-announcement-text);text-decoration-color:var(--color-announcement-text)}.announcement-content a:hover{color:var(--color-announcement-text);text-decoration-color:var(--color-link--hover)}.no-js .theme-toggle-container{display:none}.theme-toggle-container{display:flex}.theme-toggle{background:transparent;border:none;cursor:pointer;display:flex;padding:0}.theme-toggle svg{color:var(--color-foreground-primary);display:none;height:1.25rem;width:1.25rem}.theme-toggle-header{align-items:center;display:flex;justify-content:center}.nav-overlay-icon,.toc-overlay-icon{cursor:pointer;display:none}.nav-overlay-icon .icon,.toc-overlay-icon .icon{color:var(--color-foreground-secondary);height:1.5rem;width:1.5rem}.nav-overlay-icon,.toc-header-icon{align-items:center;justify-content:center}.toc-content-icon{height:1.5rem;width:1.5rem}.content-icon-container{display:flex;float:right;gap:.5rem;margin-bottom:1rem;margin-left:1rem;margin-top:1.5rem}.content-icon-container .edit-this-page svg,.content-icon-container .view-this-page 
svg{color:inherit;height:1.25rem;width:1.25rem}.sidebar-toggle{display:none;position:absolute}.sidebar-toggle[name=__toc]{left:20px}.sidebar-toggle:checked{left:40px}.overlay{background-color:rgba(0,0,0,.54);height:0;opacity:0;position:fixed;top:0;transition:width 0ms,height 0ms,opacity .25s ease-out;width:0}.sidebar-overlay{z-index:20}.toc-overlay{z-index:40}.sidebar-drawer{transition:left .25s ease-in-out;z-index:30}.toc-drawer{transition:right .25s ease-in-out;z-index:50}#__navigation:checked~.sidebar-overlay{height:100%;opacity:1;width:100%}#__navigation:checked~.page .sidebar-drawer{left:0;top:0}#__toc:checked~.toc-overlay{height:100%;opacity:1;width:100%}#__toc:checked~.page .toc-drawer{right:0;top:0}.back-to-top{background:var(--color-background-primary);border-radius:1rem;box-shadow:0 .2rem .5rem rgba(0,0,0,.05),0 0 1px 0 hsla(220,9%,46%,.502);display:none;font-size:.8125rem;left:0;margin-left:50%;padding:.5rem .75rem .5rem .5rem;position:fixed;text-decoration:none;top:1rem;transform:translateX(-50%);z-index:10}.back-to-top svg{height:1rem;width:1rem;fill:currentColor;display:inline-block}.back-to-top span{margin-left:.25rem}.show-back-to-top .back-to-top{align-items:center;display:flex}@media(min-width:97em){html{font-size:110%}}@media(max-width:82em){.toc-content-icon{display:flex}.toc-drawer{border-left:1px solid var(--color-background-muted);height:100vh;position:fixed;right:-15em;top:0}.toc-tree{border-left:none;font-size:var(--toc-font-size--mobile)}.sidebar-drawer{width:calc(50% - 18.5em)}}@media(max-width:67em){.content{margin-left:auto;margin-right:auto;padding:0 1em}}@media(max-width:63em){.nav-overlay-icon{display:flex}.sidebar-drawer{height:100vh;left:-15em;position:fixed;top:0;width:15em}.theme-toggle-header,.toc-header-icon{display:flex}.theme-toggle-content,.toc-content-icon{display:none}.mobile-header{align-items:center;display:flex;justify-content:space-between;position:sticky;top:0}.mobile-header .header-left,.mobile-header .header-right{display:flex;height:var(--header-height);padding:0 var(--header-padding)}.mobile-header .header-left label,.mobile-header .header-right label{height:100%;-webkit-user-select:none;-moz-user-select:none;user-select:none;width:100%}.nav-overlay-icon .icon,.theme-toggle svg{height:1.5rem;width:1.5rem}:target{scroll-margin-top:calc(var(--header-height) + 2.5rem)}.back-to-top{top:calc(var(--header-height) + .5rem)}.page{flex-direction:column;justify-content:center}}@media(max-width:48em){.content{overflow-x:auto;width:100%}}@media(max-width:46em){article[role=main] aside.sidebar{float:none;margin:1rem 0;width:100%}}.admonition,.topic{background:var(--color-admonition-background);border-radius:.2rem;box-shadow:0 .2rem .5rem rgba(0,0,0,.05),0 0 .0625rem rgba(0,0,0,.1);font-size:var(--admonition-font-size);margin:1rem auto;overflow:hidden;padding:0 .5rem .5rem;page-break-inside:avoid}.admonition>:nth-child(2),.topic>:nth-child(2){margin-top:0}.admonition>:last-child,.topic>:last-child{margin-bottom:0}.admonition p.admonition-title,p.topic-title{font-size:var(--admonition-title-font-size);font-weight:500;line-height:1.3;margin:0 -.5rem .5rem;padding:.4rem .5rem .4rem 2rem;position:relative}.admonition 
p.admonition-title:before,p.topic-title:before{content:"";height:1rem;left:.5rem;position:absolute;width:1rem}p.admonition-title{background-color:var(--color-admonition-title-background)}p.admonition-title:before{background-color:var(--color-admonition-title);-webkit-mask-image:var(--icon-admonition-default);mask-image:var(--icon-admonition-default);-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat}p.topic-title{background-color:var(--color-topic-title-background)}p.topic-title:before{background-color:var(--color-topic-title);-webkit-mask-image:var(--icon-topic-default);mask-image:var(--icon-topic-default);-webkit-mask-repeat:no-repeat;mask-repeat:no-repeat}.admonition{border-left:.2rem solid var(--color-admonition-title)}.admonition.caution{border-left-color:var(--color-admonition-title--caution)}.admonition.caution>.admonition-title{background-color:var(--color-admonition-title-background--caution)}.admonition.caution>.admonition-title:before{background-color:var(--color-admonition-title--caution);-webkit-mask-image:var(--icon-spark);mask-image:var(--icon-spark)}.admonition.warning{border-left-color:var(--color-admonition-title--warning)}.admonition.warning>.admonition-title{background-color:var(--color-admonition-title-background--warning)}.admonition.warning>.admonition-title:before{background-color:var(--color-admonition-title--warning);-webkit-mask-image:var(--icon-warning);mask-image:var(--icon-warning)}.admonition.danger{border-left-color:var(--color-admonition-title--danger)}.admonition.danger>.admonition-title{background-color:var(--color-admonition-title-background--danger)}.admonition.danger>.admonition-title:before{background-color:var(--color-admonition-title--danger);-webkit-mask-image:var(--icon-spark);mask-image:var(--icon-spark)}.admonition.attention{border-left-color:var(--color-admonition-title--attention)}.admonition.attention>.admonition-title{background-color:var(--color-admonition-title-background--attention)}.admonition.attention>.admonition-title:before{background-color:var(--color-admonition-title--attention);-webkit-mask-image:var(--icon-warning);mask-image:var(--icon-warning)}.admonition.error{border-left-color:var(--color-admonition-title--error)}.admonition.error>.admonition-title{background-color:var(--color-admonition-title-background--error)}.admonition.error>.admonition-title:before{background-color:var(--color-admonition-title--error);-webkit-mask-image:var(--icon-failure);mask-image:var(--icon-failure)}.admonition.hint{border-left-color:var(--color-admonition-title--hint)}.admonition.hint>.admonition-title{background-color:var(--color-admonition-title-background--hint)}.admonition.hint>.admonition-title:before{background-color:var(--color-admonition-title--hint);-webkit-mask-image:var(--icon-question);mask-image:var(--icon-question)}.admonition.tip{border-left-color:var(--color-admonition-title--tip)}.admonition.tip>.admonition-title{background-color:var(--color-admonition-title-background--tip)}.admonition.tip>.admonition-title:before{background-color:var(--color-admonition-title--tip);-webkit-mask-image:var(--icon-info);mask-image:var(--icon-info)}.admonition.important{border-left-color:var(--color-admonition-title--important)}.admonition.important>.admonition-title{background-color:var(--color-admonition-title-background--important)}.admonition.important>.admonition-title:before{background-color:var(--color-admonition-title--important);-webkit-mask-image:var(--icon-flame);mask-image:var(--icon-flame)}.admonition.note{border-left-color:var(--color-ad
monition-title--note)}.admonition.note>.admonition-title{background-color:var(--color-admonition-title-background--note)}.admonition.note>.admonition-title:before{background-color:var(--color-admonition-title--note);-webkit-mask-image:var(--icon-pencil);mask-image:var(--icon-pencil)}.admonition.seealso{border-left-color:var(--color-admonition-title--seealso)}.admonition.seealso>.admonition-title{background-color:var(--color-admonition-title-background--seealso)}.admonition.seealso>.admonition-title:before{background-color:var(--color-admonition-title--seealso);-webkit-mask-image:var(--icon-info);mask-image:var(--icon-info)}.admonition.admonition-todo{border-left-color:var(--color-admonition-title--admonition-todo)}.admonition.admonition-todo>.admonition-title{background-color:var(--color-admonition-title-background--admonition-todo)}.admonition.admonition-todo>.admonition-title:before{background-color:var(--color-admonition-title--admonition-todo);-webkit-mask-image:var(--icon-pencil);mask-image:var(--icon-pencil)}.admonition-todo>.admonition-title{text-transform:uppercase}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dd{margin-left:2rem}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dd>:first-child{margin-top:.125rem}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list,dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) dd>:last-child{margin-bottom:.75rem}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list>dt{font-size:var(--font-size--small);text-transform:uppercase}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list dd:empty{margin-bottom:.5rem}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list dd>ul{margin-left:-1.2rem}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list dd>ul>li>p:nth-child(2){margin-top:0}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple) .field-list dd>ul>li>p+p:last-child:empty{margin-bottom:0;margin-top:0}dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)>dt{color:var(--color-api-overall)}.sig:not(.sig-inline){background:var(--color-api-background);border-radius:.25rem;font-family:var(--font-stack--monospace);font-size:var(--api-font-size);font-weight:700;margin-left:-.25rem;margin-right:-.25rem;padding:.25rem .5rem .25rem 3em;text-indent:-2.5em;transition:background .1s ease-out}.sig:not(.sig-inline):hover{background:var(--color-api-background-hover)}.sig:not(.sig-inline) a.reference .viewcode-link{font-weight:400;width:4.25rem}em.property{font-style:normal}em.property:first-child{color:var(--color-api-keyword)}.sig-name{color:var(--color-api-name)}.sig-prename{color:var(--color-api-pre-name);font-weight:400}.sig-paren{color:var(--color-api-paren)}.sig-param{font-style:normal}div.deprecated,div.versionadded,div.versionchanged,div.versionremoved{border-left:.1875rem solid;border-radius:.125rem;padding-left:.75rem}div.deprecated p,div.versionadded p,div.versionchanged p,div.versionremoved p{margin-bottom:.125rem;margin-top:.125rem}div.versionadded{border-color:var(--color-api-added-border)}div.versionadded .versionmodified{color:var(--color-api-added)}div.versionchanged{border-color:var(--color-api-changed-border)}div.versionchanged 
.versionmodified{color:var(--color-api-changed)}div.deprecated{border-color:var(--color-api-deprecated-border)}div.deprecated .versionmodified{color:var(--color-api-deprecated)}div.versionremoved{border-color:var(--color-api-removed-border)}div.versionremoved .versionmodified{color:var(--color-api-removed)}.viewcode-back,.viewcode-link{float:right;text-align:right}.line-block{margin-bottom:.75rem;margin-top:.5rem}.line-block .line-block{margin-bottom:0;margin-top:0;padding-left:1rem}.code-block-caption,article p.caption,table>caption{font-size:var(--font-size--small);text-align:center}.toctree-wrapper.compound .caption,.toctree-wrapper.compound :not(.caption)>.caption-text{font-size:var(--font-size--small);margin-bottom:0;text-align:initial;text-transform:uppercase}.toctree-wrapper.compound>ul{margin-bottom:0;margin-top:0}.sig-inline,code.literal{background:var(--color-inline-code-background);border-radius:.2em;font-size:var(--font-size--small--2);padding:.1em .2em}pre.literal-block .sig-inline,pre.literal-block code.literal{font-size:inherit;padding:0}p .sig-inline,p code.literal{border:1px solid var(--color-background-border)}.sig-inline{font-family:var(--font-stack--monospace)}div[class*=" highlight-"],div[class^=highlight-]{display:flex;margin:1em 0}div[class*=" highlight-"] .table-wrapper,div[class^=highlight-] .table-wrapper,pre{margin:0;padding:0}pre{overflow:auto}article[role=main] .highlight pre{line-height:1.5}.highlight pre,pre.literal-block{font-size:var(--code-font-size);padding:.625rem .875rem}pre.literal-block{background-color:var(--color-code-background);border-radius:.2rem;color:var(--color-code-foreground);margin-bottom:1rem;margin-top:1rem}.highlight{border-radius:.2rem;width:100%}.highlight .gp,.highlight span.linenos{pointer-events:none;-webkit-user-select:none;-moz-user-select:none;user-select:none}.highlight .hll{display:block;margin-left:-.875rem;margin-right:-.875rem;padding-left:.875rem;padding-right:.875rem}.code-block-caption{background-color:var(--color-code-background);border-bottom:1px solid;border-radius:.25rem;border-bottom-left-radius:0;border-bottom-right-radius:0;border-color:var(--color-background-border);color:var(--color-code-foreground);display:flex;font-weight:300;padding:.625rem .875rem}.code-block-caption+div[class]{margin-top:0}.code-block-caption+div[class] pre{border-top-left-radius:0;border-top-right-radius:0}.highlighttable{display:block;width:100%}.highlighttable tbody{display:block}.highlighttable tr{display:flex}.highlighttable td.linenos{background-color:var(--color-code-background);border-bottom-left-radius:.2rem;border-top-left-radius:.2rem;color:var(--color-code-foreground);padding:.625rem 0 .625rem .875rem}.highlighttable .linenodiv{box-shadow:-.0625rem 0 var(--color-foreground-border) inset;font-size:var(--code-font-size);padding-right:.875rem}.highlighttable td.code{display:block;flex:1;overflow:hidden;padding:0}.highlighttable td.code .highlight{border-bottom-left-radius:0;border-top-left-radius:0}.highlight span.linenos{box-shadow:-.0625rem 0 var(--color-foreground-border) inset;display:inline-block;margin-right:.875rem;padding-left:0;padding-right:.875rem}.footnote-reference{font-size:var(--font-size--small--4);vertical-align:super}dl.footnote.brackets{color:var(--color-foreground-secondary);display:grid;font-size:var(--font-size--small);grid-template-columns:max-content auto}dl.footnote.brackets dt{margin:0}dl.footnote.brackets dt>.fn-backref{margin-left:.25rem}dl.footnote.brackets dt:after{content:":"}dl.footnote.brackets dt 
.brackets:before{content:"["}dl.footnote.brackets dt .brackets:after{content:"]"}dl.footnote.brackets dd{margin:0;padding:0 1rem}aside.footnote{color:var(--color-foreground-secondary);font-size:var(--font-size--small)}aside.footnote>span,div.citation>span{float:left;font-weight:500;padding-right:.25rem}aside.footnote>:not(span),div.citation>p{margin-left:2rem}img{box-sizing:border-box;height:auto;max-width:100%}article .figure,article figure{border-radius:.2rem;margin:0}article .figure :last-child,article figure :last-child{margin-bottom:0}article .align-left{clear:left;float:left;margin:0 1rem 1rem}article .align-right{clear:right;float:right;margin:0 1rem 1rem}article .align-center,article .align-default{display:block;margin-left:auto;margin-right:auto;text-align:center}article table.align-default{display:table;text-align:initial}.domainindex-jumpbox,.genindex-jumpbox{border-bottom:1px solid var(--color-background-border);border-top:1px solid var(--color-background-border);padding:.25rem}.domainindex-section h2,.genindex-section h2{margin-bottom:.5rem;margin-top:.75rem}.domainindex-section ul,.genindex-section ul{margin-bottom:0;margin-top:0}ol,ul{margin-bottom:1rem;margin-top:1rem;padding-left:1.2rem}ol li>p:first-child,ul li>p:first-child{margin-bottom:.25rem;margin-top:.25rem}ol li>p:last-child,ul li>p:last-child{margin-top:.25rem}ol li>ol,ol li>ul,ul li>ol,ul li>ul{margin-bottom:.5rem;margin-top:.5rem}ol.arabic{list-style:decimal}ol.loweralpha{list-style:lower-alpha}ol.upperalpha{list-style:upper-alpha}ol.lowerroman{list-style:lower-roman}ol.upperroman{list-style:upper-roman}.simple li>ol,.simple li>ul,.toctree-wrapper li>ol,.toctree-wrapper li>ul{margin-bottom:0;margin-top:0}.field-list dt,.option-list dt,dl.footnote dt,dl.glossary dt,dl.simple dt,dl:not([class]) dt{font-weight:500;margin-top:.25rem}.field-list dt+dt,.option-list dt+dt,dl.footnote dt+dt,dl.glossary dt+dt,dl.simple dt+dt,dl:not([class]) dt+dt{margin-top:0}.field-list dt .classifier:before,.option-list dt .classifier:before,dl.footnote dt .classifier:before,dl.glossary dt .classifier:before,dl.simple dt .classifier:before,dl:not([class]) dt .classifier:before{content:":";margin-left:.2rem;margin-right:.2rem}.field-list dd ul,.field-list dd>p:first-child,.option-list dd ul,.option-list dd>p:first-child,dl.footnote dd ul,dl.footnote dd>p:first-child,dl.glossary dd ul,dl.glossary dd>p:first-child,dl.simple dd ul,dl.simple dd>p:first-child,dl:not([class]) dd ul,dl:not([class]) dd>p:first-child{margin-top:.125rem}.field-list dd ul,.option-list dd ul,dl.footnote dd ul,dl.glossary dd ul,dl.simple dd ul,dl:not([class]) dd ul{margin-bottom:.125rem}.math-wrapper{overflow-x:auto;width:100%}div.math{position:relative;text-align:center}div.math .headerlink,div.math:focus .headerlink{display:none}div.math:hover .headerlink{display:inline-block}div.math span.eqno{position:absolute;right:.5rem;top:50%;transform:translateY(-50%);z-index:1}abbr[title]{cursor:help}.problematic{color:var(--color-problematic)}kbd:not(.compound){background-color:var(--color-background-secondary);border:1px solid var(--color-foreground-border);border-radius:.2rem;box-shadow:0 .0625rem 0 rgba(0,0,0,.2),inset 0 0 0 .125rem var(--color-background-primary);color:var(--color-foreground-primary);display:inline-block;font-size:var(--font-size--small--3);margin:0 .2rem;padding:0 .2rem;vertical-align:text-bottom}blockquote{background:var(--color-background-secondary);border-left:4px solid var(--color-background-border);margin-left:0;margin-right:0;padding:.5rem 
1rem}blockquote .attribution{font-weight:600;text-align:right}blockquote.highlights,blockquote.pull-quote{font-size:1.25em}blockquote.epigraph,blockquote.pull-quote{border-left-width:0;border-radius:.5rem}blockquote.highlights{background:transparent;border-left-width:0}p .reference img{vertical-align:middle}p.rubric{font-size:1.125em;font-weight:700;line-height:1.25}dd p.rubric{font-size:var(--font-size--small);font-weight:inherit;line-height:inherit;text-transform:uppercase}article .sidebar{background-color:var(--color-background-secondary);border:1px solid var(--color-background-border);border-radius:.2rem;clear:right;float:right;margin-left:1rem;margin-right:0;width:30%}article .sidebar>*{padding-left:1rem;padding-right:1rem}article .sidebar>ol,article .sidebar>ul{padding-left:2.2rem}article .sidebar .sidebar-title{border-bottom:1px solid var(--color-background-border);font-weight:500;margin:0;padding:.5rem 1rem}[role=main] .table-wrapper.container{margin-bottom:.5rem;margin-top:1rem;overflow-x:auto;padding:.2rem .2rem .75rem;width:100%}table.docutils{border-collapse:collapse;border-radius:.2rem;border-spacing:0;box-shadow:0 .2rem .5rem rgba(0,0,0,.05),0 0 .0625rem rgba(0,0,0,.1)}table.docutils th{background:var(--color-table-header-background)}table.docutils td,table.docutils th{border-bottom:1px solid var(--color-table-border);border-left:1px solid var(--color-table-border);border-right:1px solid var(--color-table-border);padding:0 .25rem}table.docutils td p,table.docutils th p{margin:.25rem}table.docutils td:first-child,table.docutils th:first-child{border-left:none}table.docutils td:last-child,table.docutils th:last-child{border-right:none}table.docutils td.text-left,table.docutils th.text-left{text-align:left}table.docutils td.text-right,table.docutils th.text-right{text-align:right}table.docutils td.text-center,table.docutils th.text-center{text-align:center}:target{scroll-margin-top:2.5rem}@media(max-width:67em){:target{scroll-margin-top:calc(2.5rem + var(--header-height))}section>span:target{scroll-margin-top:calc(2.8rem + var(--header-height))}}.headerlink{font-weight:100;-webkit-user-select:none;-moz-user-select:none;user-select:none}.code-block-caption>.headerlink,dl dt>.headerlink,figcaption p>.headerlink,h1>.headerlink,h2>.headerlink,h3>.headerlink,h4>.headerlink,h5>.headerlink,h6>.headerlink,p.caption>.headerlink,table>caption>.headerlink{margin-left:.5rem;visibility:hidden}.code-block-caption:hover>.headerlink,dl dt:hover>.headerlink,figcaption p:hover>.headerlink,h1:hover>.headerlink,h2:hover>.headerlink,h3:hover>.headerlink,h4:hover>.headerlink,h5:hover>.headerlink,h6:hover>.headerlink,p.caption:hover>.headerlink,table>caption:hover>.headerlink{visibility:visible}.code-block-caption>.toc-backref,dl dt>.toc-backref,figcaption p>.toc-backref,h1>.toc-backref,h2>.toc-backref,h3>.toc-backref,h4>.toc-backref,h5>.toc-backref,h6>.toc-backref,p.caption>.toc-backref,table>caption>.toc-backref{color:inherit;text-decoration-line:none}figure:hover>figcaption>p>.headerlink,table:hover>caption>.headerlink{visibility:visible}:target>h1:first-of-type,:target>h2:first-of-type,:target>h3:first-of-type,:target>h4:first-of-type,:target>h5:first-of-type,:target>h6:first-of-type,span:target~h1:first-of-type,span:target~h2:first-of-type,span:target~h3:first-of-type,span:target~h4:first-of-type,span:target~h5:first-of-type,span:target~h6:first-of-type{background-color:var(--color-highlight-on-target)}:target>h1:first-of-type code.literal,:target>h2:first-of-type 
code.literal,:target>h3:first-of-type code.literal,:target>h4:first-of-type code.literal,:target>h5:first-of-type code.literal,:target>h6:first-of-type code.literal,span:target~h1:first-of-type code.literal,span:target~h2:first-of-type code.literal,span:target~h3:first-of-type code.literal,span:target~h4:first-of-type code.literal,span:target~h5:first-of-type code.literal,span:target~h6:first-of-type code.literal{background-color:transparent}.literal-block-wrapper:target .code-block-caption,.this-will-duplicate-information-and-it-is-still-useful-here li :target,figure:target,table:target>caption{background-color:var(--color-highlight-on-target)}dt:target{background-color:var(--color-highlight-on-target)!important}.footnote-reference:target,.footnote>dt:target+dd{background-color:var(--color-highlight-on-target)}.guilabel{background-color:var(--color-guilabel-background);border:1px solid var(--color-guilabel-border);border-radius:.5em;color:var(--color-guilabel-text);font-size:.9em;padding:0 .3em}footer{display:flex;flex-direction:column;font-size:var(--font-size--small);margin-top:2rem}.bottom-of-page{align-items:center;border-top:1px solid var(--color-background-border);color:var(--color-foreground-secondary);display:flex;justify-content:space-between;line-height:1.5;margin-top:1rem;padding-bottom:1rem;padding-top:1rem}@media(max-width:46em){.bottom-of-page{flex-direction:column-reverse;gap:.25rem;text-align:center}}.bottom-of-page .left-details{font-size:var(--font-size--small)}.bottom-of-page .right-details{display:flex;flex-direction:column;gap:.25rem;text-align:right}.bottom-of-page .icons{display:flex;font-size:1rem;gap:.25rem;justify-content:flex-end}.bottom-of-page .icons a{text-decoration:none}.bottom-of-page .icons img,.bottom-of-page .icons svg{font-size:1.125rem;height:1em;width:1em}.related-pages a{align-items:center;display:flex;text-decoration:none}.related-pages a:hover .page-info .title{color:var(--color-link);text-decoration:underline;text-decoration-color:var(--color-link-underline)}.related-pages a svg.furo-related-icon,.related-pages a svg.furo-related-icon>use{color:var(--color-foreground-border);flex-shrink:0;height:.75rem;margin:0 .5rem;width:.75rem}.related-pages a.next-page{clear:right;float:right;max-width:50%;text-align:right}.related-pages a.prev-page{clear:left;float:left;max-width:50%}.related-pages a.prev-page svg{transform:rotate(180deg)}.page-info{display:flex;flex-direction:column;overflow-wrap:anywhere}.next-page .page-info{align-items:flex-end}.page-info .context{align-items:center;color:var(--color-foreground-muted);display:flex;font-size:var(--font-size--small);padding-bottom:.1rem;text-decoration:none}ul.search{list-style:none;padding-left:0}ul.search li{border-bottom:1px solid var(--color-background-border);padding:1rem 0}[role=main] .highlighted{background-color:var(--color-highlighted-background);color:var(--color-highlighted-text)}.sidebar-brand{display:flex;flex-direction:column;flex-shrink:0;padding:var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal);text-decoration:none}.sidebar-brand-text{color:var(--color-sidebar-brand-text);font-size:1.5rem;overflow-wrap:break-word}.sidebar-brand-text,.sidebar-logo-container{margin:var(--sidebar-item-spacing-vertical) 0}.sidebar-logo{display:block;margin:0 
auto;max-width:100%}.sidebar-search-container{align-items:center;background:var(--color-sidebar-search-background);display:flex;margin-top:var(--sidebar-search-space-above);position:relative}.sidebar-search-container:focus-within,.sidebar-search-container:hover{background:var(--color-sidebar-search-background--focus)}.sidebar-search-container:before{background-color:var(--color-sidebar-search-icon);content:"";height:var(--sidebar-search-icon-size);left:var(--sidebar-item-spacing-horizontal);-webkit-mask-image:var(--icon-search);mask-image:var(--icon-search);position:absolute;width:var(--sidebar-search-icon-size)}.sidebar-search{background:transparent;border:none;border-bottom:1px solid var(--color-sidebar-search-border);border-top:1px solid var(--color-sidebar-search-border);box-sizing:border-box;color:var(--color-sidebar-search-foreground);padding:var(--sidebar-search-input-spacing-vertical) var(--sidebar-search-input-spacing-horizontal) var(--sidebar-search-input-spacing-vertical) calc(var(--sidebar-item-spacing-horizontal) + var(--sidebar-search-input-spacing-horizontal) + var(--sidebar-search-icon-size));width:100%;z-index:10}.sidebar-search:focus{outline:none}.sidebar-search::-moz-placeholder{font-size:var(--sidebar-search-input-font-size)}.sidebar-search::placeholder{font-size:var(--sidebar-search-input-font-size)}#searchbox .highlight-link{margin:0;padding:var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal) 0;text-align:center}#searchbox .highlight-link a{color:var(--color-sidebar-search-icon);font-size:var(--font-size--small--2)}.sidebar-tree{font-size:var(--sidebar-item-font-size);margin-bottom:var(--sidebar-item-spacing-vertical);margin-top:var(--sidebar-tree-space-above)}.sidebar-tree ul{display:flex;flex-direction:column;list-style:none;margin-bottom:0;margin-top:0;padding:0}.sidebar-tree li{margin:0;position:relative}.sidebar-tree li>ul{margin-left:var(--sidebar-item-spacing-horizontal)}.sidebar-tree .icon,.sidebar-tree .reference{color:var(--color-sidebar-link-text)}.sidebar-tree .reference{box-sizing:border-box;display:inline-block;height:100%;line-height:var(--sidebar-item-line-height);overflow-wrap:anywhere;padding:var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal);text-decoration:none;width:100%}.sidebar-tree .reference:hover{background:var(--color-sidebar-item-background--hover);color:var(--color-sidebar-link-text)}.sidebar-tree .reference.external:after{color:var(--color-sidebar-link-text);content:url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='12' fill='none' stroke='%23607D8B' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' viewBox='0 0 24 24'%3E%3Cpath stroke='none' d='M0 0h24v24H0z'/%3E%3Cpath d='M11 7H6a2 2 0 0 0-2 2v9a2 2 0 0 0 2 2h9a2 2 0 0 0 2-2v-5M10 14 20 4M15 4h5v5'/%3E%3C/svg%3E");margin:0 .25rem;vertical-align:middle}.sidebar-tree .current-page>.reference{font-weight:700}.sidebar-tree label{align-items:center;cursor:pointer;display:flex;height:var(--sidebar-item-height);justify-content:center;position:absolute;right:0;top:0;-webkit-user-select:none;-moz-user-select:none;user-select:none;width:var(--sidebar-expander-width)}.sidebar-tree .caption,.sidebar-tree :not(.caption)>.caption-text{color:var(--color-sidebar-caption-text);font-size:var(--sidebar-caption-font-size);font-weight:700;margin:var(--sidebar-caption-space-above) 0 0 0;padding:var(--sidebar-item-spacing-vertical) 
var(--sidebar-item-spacing-horizontal);text-transform:uppercase}.sidebar-tree li.has-children>.reference{padding-right:var(--sidebar-expander-width)}.sidebar-tree .toctree-l1>.reference,.sidebar-tree .toctree-l1>label .icon{color:var(--color-sidebar-link-text--top-level)}.sidebar-tree label{background:var(--color-sidebar-item-expander-background)}.sidebar-tree label:hover{background:var(--color-sidebar-item-expander-background--hover)}.sidebar-tree .current>.reference{background:var(--color-sidebar-item-background--current)}.sidebar-tree .current>.reference:hover{background:var(--color-sidebar-item-background--hover)}.toctree-checkbox{display:none;position:absolute}.toctree-checkbox~ul{display:none}.toctree-checkbox~label .icon svg{transform:rotate(90deg)}.toctree-checkbox:checked~ul{display:block}.toctree-checkbox:checked~label .icon svg{transform:rotate(-90deg)}.toc-title-container{padding:var(--toc-title-padding);padding-top:var(--toc-spacing-vertical)}.toc-title{color:var(--color-toc-title-text);font-size:var(--toc-title-font-size);padding-left:var(--toc-spacing-horizontal);text-transform:uppercase}.no-toc{display:none}.toc-tree-container{padding-bottom:var(--toc-spacing-vertical)}.toc-tree{border-left:1px solid var(--color-background-border);font-size:var(--toc-font-size);line-height:1.3;padding-left:calc(var(--toc-spacing-horizontal) - var(--toc-item-spacing-horizontal))}.toc-tree>ul>li:first-child{padding-top:0}.toc-tree>ul>li:first-child>ul{padding-left:0}.toc-tree>ul>li:first-child>a{display:none}.toc-tree ul{list-style-type:none;margin-bottom:0;margin-top:0;padding-left:var(--toc-item-spacing-horizontal)}.toc-tree li{padding-top:var(--toc-item-spacing-vertical)}.toc-tree li.scroll-current>.reference{color:var(--color-toc-item-text--active);font-weight:700}.toc-tree a.reference{color:var(--color-toc-item-text);overflow-wrap:anywhere;text-decoration:none}.toc-scroll{max-height:100vh;overflow-y:scroll}.contents:not(.this-will-duplicate-information-and-it-is-still-useful-here){background:rgba(255,0,0,.25);color:var(--color-problematic)}.contents:not(.this-will-duplicate-information-and-it-is-still-useful-here):before{content:"ERROR: Adding a table of contents in Furo-based documentation is unnecessary, and does not work well with existing styling. 
Add a 'this-will-duplicate-information-and-it-is-still-useful-here' class, if you want an escape hatch."}.text-align\:left>p{text-align:left}.text-align\:center>p{text-align:center}.text-align\:right>p{text-align:right} +/*# sourceMappingURL=furo.css.map*/ \ No newline at end of file diff --git a/_static/styles/furo.css.map b/_static/styles/furo.css.map new file mode 100644 index 0000000..3ecc371 --- /dev/null +++ b/_static/styles/furo.css.map @@ -0,0 +1 @@ +{"version":3,"file":"styles/furo.css","mappings":"AAAA,2EAA2E,CAU3E,KACE,gBAAiB,CACjB,6BACF,CASA,KACE,QACF,CAMA,KACE,aACF,CAOA,GACE,aAAc,CACd,cACF,CAUA,GACE,sBAAuB,CACvB,QAAS,CACT,gBACF,CAOA,IACE,+BAAiC,CACjC,aACF,CASA,EACE,4BACF,CAOA,YACE,kBAAmB,CACnB,yBAA0B,CAC1B,gCACF,CAMA,SAEE,kBACF,CAOA,cAGE,+BAAiC,CACjC,aACF,CAeA,QAEE,aAAc,CACd,aAAc,CACd,iBAAkB,CAClB,uBACF,CAEA,IACE,aACF,CAEA,IACE,SACF,CASA,IACE,iBACF,CAUA,sCAKE,mBAAoB,CACpB,cAAe,CACf,gBAAiB,CACjB,QACF,CAOA,aAEE,gBACF,CAOA,cAEE,mBACF,CAMA,gDAIE,yBACF,CAMA,wHAIE,iBAAkB,CAClB,SACF,CAMA,4GAIE,6BACF,CAMA,SACE,0BACF,CASA,OACE,qBAAsB,CACtB,aAAc,CACd,aAAc,CACd,cAAe,CACf,SAAU,CACV,kBACF,CAMA,SACE,uBACF,CAMA,SACE,aACF,CAOA,6BAEE,qBAAsB,CACtB,SACF,CAMA,kFAEE,WACF,CAOA,cACE,4BAA6B,CAC7B,mBACF,CAMA,yCACE,uBACF,CAOA,6BACE,yBAA0B,CAC1B,YACF,CASA,QACE,aACF,CAMA,QACE,iBACF,CAiBA,kBACE,YACF,CCvVA,aAcE,kEACE,uBAOF,WACE,iDAMF,kCACE,wBAEF,qCAEE,uBADA,uBACA,CAEF,SACE,wBAtBA,CCpBJ,iBAGE,qBAEA,sBACA,0BAFA,oBAHA,4BACA,oBAKA,6BAIA,2CAFA,mBACA,sCAFA,4BAGA,CAEF,gBACE,aCTF,KCGE,mHAEA,wGAEA,wCAAyC,CAEzC,wBAAyB,CACzB,wBAAyB,CACzB,4BAA6B,CAC7B,yBAA0B,CAC1B,2BAA4B,CAG5B,sDAAuD,CACvD,gDAAiD,CACjD,wDAAyD,CAGzD,0CAA2C,CAC3C,gDAAiD,CACjD,gDAAiD,CAKjD,gCAAiC,CACjC,sCAAuC,CAGvC,2CAA4C,CAG5C,uCAAwC,CCjCxC,+FAGA,uBAAwB,CAGxB,iCAAkC,CAClC,kCAAmC,CAEnC,+BAAgC,CAChC,sCAAuC,CACvC,sCAAuC,CACvC,qGAIA,mDAAoD,CAEpD,mCAAoC,CACpC,8CAA+C,CAC/C,gDAAiD,CACjD,kCAAmC,CACnC,6DAA8D,CAG9D,6BAA8B,CAC9B,6BAA8B,CAC9B,+BAAgC,CAChC,kCAAmC,CACnC,kCAAmC,CCPjC,+jBCYA,iqCAZF,iaCVA,8KAOA,4SAWA,4SAUA,0CACA,gEAGA,0CAGA,gEAGA,yCACA,+DAIA,4CACA,kEAGA,wCAUA,8DACA,uCAGA,4DACA,sCACA,2DAGA,4CACA,kEACA,uCAGA,6DACA,2GAGA,sHAEA,yFAEA,+CACA,+EAGA,4MAOA,gCACA,sHAIA,kCACA,uEACA,gEACA,4DACA,kEAGA,2DACA,sDACA,0CACA,8CACA,wGAGA,0BACA,iCAGA,+DACA,+BACA,sCACA,+DAEA,kGACA,oCACA,yDACA,sCL7HF,kCAEA,sDAIA,0CK2HE,kEAIA,oDACA,sDAGA,oCACA,oEAEA,0DACA,qDAIA,oDACA,6DAIA,iEAIA,2DAIA,2DAGA,4DACA,gEAIA,gEAEA,gFAEA,oNASA,qDLxKE,gFAGE,4DAIF,oEKkHF,yEAEA,6DAGA,0DAEA,uDACA,qDACA,wDAIA,6DAIA,yDACA,2DAIA,uCAGA,wCACA,sDAGA,+CAGA,6DAEA,iDACA,+DAEA,wDAEA,sEAMA,0DACA,sBACA,mEL9JI,wEAEA,iCACE,+BAMN,wEAGA,iCACE,kFAEA,uEAIF,gEACE,8BAGF,qEMvDA,sCAKA,wFAKA,iCAIA,0BAWA,iCACA,4BACA,mCAGA,+BAEA,sCACA,4BAEA,mCAEA,sCAKA,sDAIA,gCAEA,gEAQF,wCAME,sBACA,kCAKA,uBAEA,gEAIA,2BAIA,mCAEA,qCACA,iCAGE,+BACA,wEAEE,iCACA,kFAGF,6BACA,0CACF,kCAEE,8BACE,8BACA,qEAEE,sCACA,wFCnFN,iCAGF,2DAEE,4BACA,oCAGA,mIAGA,4HACE,gEAMJ,+CAGE,sBACA,yCAEF,uBAEE,sEAKA,gDACA,kEAGA,iFAGE,YAGF,EACA,4HAQF,mBACE,6BACA,mBACA,wCACA,wCACA,2CAIA,eAGA,mBAKE,mBAGA,CAJA,uCACA,iBAFF,gBACE,CAKE,mBACA,mBAGJ,oBAIF,+BAGE,kDACA,OADA,kBAGA,CAFA,gBAEA,mBACA,oBAEA,sCACA,OAGF,cAHE,WAGF,GAEE,oBACA,CAHF,gBAGE,CC9Gc,YDiHd,+CAIF,SAEE,CAPF,UACE,wBAMA,4BAEA,GAGA,uBACA,CAJA,yBAGA,CACA,iDAKA,2CAGA,2DAQA,iBACA,uCAGA,kEAKE,SAKJ,8BACE,yDACA,2BAEA,oBACA,8BAEA,yDAEE,4BAEJ,uCACE,CACA,iEAGA,CAEA,wCACE,uBACA,kDAEA,0DAEE,CAJF,oBAIE,0GAWN,aACE,CAHA,YAGA,4HASA,+CAGF,sBACE,WACA,WAQA,4BAFF,0CAEE,CARA,qCAsBA,CAdA,iBAEA,kBACE,aADF,4BACE,WAMF,2BAGF,qCAEE,CAXE,UAWF,+BAGA,uBAEA,SAEA,0CAIE,CANF,qCAEA,CAIE,2DACE,gBAIN,+CAIA,CAEA,kDAKE,CAPF,8BAEA,CAOE,YACA,CAjBI,2BAGN,CAHM,WAcJ,UAGA
,CAEA,2GAIF,iCAGE,8BAIA,qBACA,oBACF,uBAOI,0CAIA,CATF,6DAKE,CALF,sBASE,qCAKF,CACE,cACA,CAFF,sBAEE,CACA,+BAEA,qBAEE,WAKN,aACE,sCAGA,mBAEA,6BAMA,kCACA,CAJA,sBACA,aAEA,CAJA,eACA,MAIA,2FAEA,UAGA,YACA,sBACE,8BAEA,CALF,aACA,WAIE,OACA,oBAEF,uBACE,WAEF,YAFE,UAEF,eAgBA,kBACE,CAhBA,qDAQF,qCAGF,CAGI,YACF,CAJF,2BAGI,CAEA,eACA,qBAGA,mEAEA,qBACA,8BAIA,kBADF,kBACE,yBAEJ,oCAGI,qDAIJ,+BAGI,oCAEA,+CAQF,4CACE,yBACF,2BAOE,sBACA,CAHA,WACA,CAFF,cACE,CAJA,YAGF,CAEE,SAEA,mBAGA,kDAEE,CAJF,cAEA,cAEE,sBAEA,mBADA,YACA,uBACA,mDACE,CADF,YACE,iDAEA,uCAEN,+DAOE,mBADF,sBACE,mBAGF,aACE,sCAIA,aADF,WACE,CAKF,SACE,CAHJ,kBAEE,CAJE,gBAEJ,CAHI,iBAMA,yFAKA,aACA,eACA,cElbJ,iBAEE,aADA,iBACA,6BAEA,kCAEA,SACA,UAIA,gCACA,CALA,SAEA,SAEA,CAJA,0EAEA,CAFA,OAKA,CAGA,mDACE,iBAGF,gCACE,CADF,UACE,aAEJ,iCAEE,CAFF,UAEE,wCAEA,WACA,WADA,UACA,CACA,4CAGA,MACA,CADA,KACA,wCACA,UAGA,CAJA,UAIA,6DAUA,0CACE,CAFF,mBAEE,wEACA,CAVA,YACA,CAMF,mBAJE,OAOA,gBAJJ,gCACE,CANE,cACA,CAHA,oBACA,CAGA,QAGJ,CAII,0BACA,CADA,UACA,wCAEJ,kBACE,0DACA,gCACE,kBACA,CADA,YACA,oEACA,2CAMF,mDAII,CALN,YACE,CANE,cAKJ,CACE,iBAII,kEACA,yCACE,kDACA,yDACE,+CACA,uBANN,CAMM,+BANN,uCACE,qDACA,4BAEE,mBADA,0CACA,CADA,qBACA,0DACE,wCACA,sGALJ,oCACA,sBACE,kBAFF,UAEE,2CACA,wFACE,cACA,kEANN,uBACE,iDACA,CADA,UACA,0DACE,wDAEE,iEACA,qEANN,sCACE,CAGE,iBAHF,gBAGE,qBACE,CAJJ,uBACA,gDACE,wDACA,6DAHF,2CACA,CADA,gBACA,eACE,CAGE,sBANN,8BACE,CAII,iBAFF,4DACA,WACE,YADF,uCACE,6EACA,2BANN,8CACE,kDACA,0CACE,8BACA,yFACE,sBACA,sFALJ,mEACA,sBACE,kEACA,6EACE,uCACA,kEALJ,qGAEE,kEACA,6EACE,uCACA,kEALJ,8CACA,uDACE,sEACA,2EACE,sCACA,iEALJ,mGACA,qCACE,oDACA,0DACE,6GACA,gDAGR,yDCrEA,sEACE,CACA,6GACE,gEACF,iGAIF,wFACE,qDAGA,mGAEE,2CAEF,4FACE,gCACF,wGACE,8DAEE,6FAIA,iJAKN,6GACE,gDAKF,yDACA,qCAGA,6BACA,kBACA,qDAKA,oCAEA,+DAGA,2CAGE,oDAIA,oEAEE,qBAGJ,wDAEE,uCAEF,kEAGA,8CAEA,uDAIF,gEAIE,6BACA,gEAIA,+CACE,0EAIF,sDAEE,+DAGF,sCACA,8BACE,oCAEJ,wBACE,4FAEE,gBAEJ,yGAGI,kBAGJ,CCnHE,2MCFF,oBAGE,wGAKA,iCACE,CADF,wBACE,8GAQA,mBCjBJ,2GAIE,mBACA,6HAMA,YACE,mIAYF,eACA,CAHF,YAGE,4FAGE,8BAKF,uBAkBE,sCACA,CADA,qBAbA,wCAIA,CALF,8BACE,CADF,gBAKE,wCACA,CAOA,kDACA,CACA,kCAKF,6BAGA,4CACE,kDACA,eAGF,cACE,aACA,iBACA,yBACA,8BACA,WAGJ,2BACE,cAGA,+BACA,CAHA,eAGA,wCACA,YACA,iBACA,uEAGA,0BACA,2CAEA,8EAGI,qBACA,CAFF,kBAEE,kBAGN,0CAGE,mCAGA,4BAIA,gEACE,qCACA,8BAEA,gBACA,+CACA,iCAEF,iCAEE,gEACA,qCAGF,8BAEE,+BAIA,yCAEE,qBADA,gBACA,yBAKF,eACA,CAFF,YACE,CACA,iBACA,qDAEA,mDCvIJ,2FAOE,iCACA,CAEA,eACA,CAHA,kBAEA,CAFA,wBAGA,8BACA,eACE,CAFF,YAEE,0BACA,8CAGA,oBACE,oCAGA,kBACE,8DAEA,iBAEN,UACE,8BAIJ,+CAEE,qDAEF,kDAIE,YAEF,CAFE,YAEF,CCpCE,mFADA,kBAKE,CAJF,IAGA,aACE,mCAGA,iDACE,+BAEJ,wBAEE,mBAMA,6CAEF,CAJE,mBAEA,CAEF,kCAGE,CARF,kBACE,CAHA,eAUA,YACA,mBACA,CADA,UACA,wCC9BF,oBDkCE,wBCnCJ,uCACE,+BACA,+DACA,sBAGA,qBCDA,6CAIE,CAPF,uBAGA,CDGE,oBACF,yDAEE,CCDE,2CAGF,CAJA,kCACE,CDJJ,YACE,CAIA,eCTF,CDKE,uBCMA,gCACE,YAEF,oCAEE,wBACA,0BAIF,iBAEA,cADF,UACE,uBAEA,iCAEA,wCAEA,6CAMA,CAYF,gCATI,4BASJ,CAZE,mCAEE,iCAUJ,4BAGE,4DADA,+BACA,CAHF,qBAGE,sCACE,OAEF,iBAHA,SAGA,iHACE,2DAKF,CANA,8EAMA,uSAEE,kBAEF,+FACE,yCCjEJ,WACA,yBAGA,uBACA,gBAEA,uCAIA,CAJA,iCAIA,uCAGA,UACE,gBACA,qBAEA,0CClBJ,gBACE,KAGF,qBACE,YAGF,CAHE,cAGF,gCAEE,mBACA,iEAEA,oCACA,wCAEA,sBACA,WAEA,CAFA,YAEA,8EAEA,mCAFA,iBAEA,6BAIA,wEAKA,sDAIE,CARF,mDAIA,CAIE,cAEF,8CAIA,oBAFE,iBAEF,8CAGE,eAEF,CAFE,YAEF,OAEE,kBAGJ,CAJI,eACA,CAFF,mBAKF,yCCjDE,oBACA,CAFA,iBAEA,uCAKE,iBACA,qCAGA,mBCZJ,CDWI,gBCXJ,6BAEE,eACA,sBAGA,eAEA,sBACA,oDACA,iGAMA,gBAFE,YAEF,8FAME,iJCnBF,YACA,gNAWE,gDAEF,iSAaE,kBACE,gHAKF,oCACE,eACF,CADE,UACF,8CACE,gDACF,wCACE,oBCxCJ,oBAEF,6BACE,QACE,kDAGF,yBACE,kDAmBA,kDAEF,CAhBA,+CAaA,CAbA,oBAaA,0FACE,CADF,gGAfF,cACE,gBACA,CAaA,
0BAGA,mQACE,gBAGF,oMACE,iBACA,CAFF,eACE,CADF,gBAEE,aAGJ,iCAEE,CAFF,wCAEE,wBAUE,+VAIE,uEAHA,2BAGA,wXAKJ,iDAGF,CARM,+CACE,iDAIN,CALI,gBAQN,mHACE,gBAGF,2DACE,0EAOA,0EAGF,gBAEE,6DC/EA,kDACA,gCACA,qDAGA,qBACA,qDCFA,cACA,eAEA,yBAGF,sBAEE,iBACA,sNAWA,iBACE,kBACA,wRAgBA,kBAEA,iOAgBA,uCACE,uEAEA,kBAEF,qUAuBE,iDAIJ,CACA,geCxFF,4BAEE,CAQA,6JACA,iDAIA,sEAGA,mDAOF,iDAGE,4DAIA,8CACA,qDAEE,eAFF,cAEE,oBAEF,uBAFE,kCAGA,eACA,iBACA,mBAIA,mDACA,CAHA,uCAEA,CAJA,0CACA,CAIA,gBAJA,gBACA,oBADA,gBAIA,wBAEJ,gBAGE,6BACA,YAHA,iBAGA,gCACA,iEAEA,6CACA,sDACA,0BADA,wBACA,0BACA,oIAIA,mBAFA,YAEA,qBACA,0CAIE,uBAEF,CAHA,yBACE,CAEF,iDACE,mFAKJ,oCACE,CANE,aAKJ,CACE,qEAIA,YAFA,WAEA,CAHA,aACA,CAEA,gBACE,4BACA,sBADA,aACA,gCAMF,oCACA,yDACA,2CAEA,qBAGE,kBAEA,CACA,mCAIF,CARE,YACA,CAOF,iCAEE,CAPA,oBACA,CAQA,oBACE,uDAEJ,sDAGA,CAHA,cAGA,0BACE,oDAIA,oCACA,4BACA,sBAGA,cAEA,oFAGA,sBAEA,yDACE,CAIF,iBAJE,wBAIF,6CAHE,6CAKA,eACA,aACA,CADA,cACA,yCAGJ,kBACE,CAKA,iDAEA,CARF,aACE,4CAGA,kBAIA,wEAGA,wDAGA,kCAOA,iDAGA,CAPF,WAEE,sCAEA,CAJF,2CACE,CAMA,qCACA,+BARF,kBACE,qCAOA,iBAsBA,sBACE,CAvBF,WAKA,CACE,0DAIF,CALA,uDACE,CANF,sBAqBA,4CACA,CALA,gRAIA,YAEE,6CAEN,mCAEE,+CASA,6EAIA,4BChNA,SDmNA,qFCnNA,gDACA,sCAGA,qCACA,sDACA,CAKA,kDAGA,CARA,0CAQA,kBAGA,YACA,sBACA,iBAFA,gBADF,YACE,CAHA,SAKA,kBAEA,SAFA,iBAEA,uEAGA,CAEE,6CAFF,oCAgBI,CAdF,yBACE,qBACF,CAGF,oBACE,CAIF,WACE,CALA,2CAGA,uBACF,CACE,mFAGE,CALF,qBAEA,UAGE,gCAIF,sDAEA,CALE,oCAKF,yCC7CJ,oCACE,CD+CA,yXAQE,sCCrDJ,wCAGA,oCACE","sources":["webpack:///./node_modules/normalize.css/normalize.css","webpack:///./src/furo/assets/styles/base/_print.sass","webpack:///./src/furo/assets/styles/base/_screen-readers.sass","webpack:///./src/furo/assets/styles/base/_theme.sass","webpack:///./src/furo/assets/styles/variables/_fonts.scss","webpack:///./src/furo/assets/styles/variables/_spacing.scss","webpack:///./src/furo/assets/styles/variables/_icons.scss","webpack:///./src/furo/assets/styles/variables/_admonitions.scss","webpack:///./src/furo/assets/styles/variables/_colors.scss","webpack:///./src/furo/assets/styles/base/_typography.sass","webpack:///./src/furo/assets/styles/_scaffold.sass","webpack:///./src/furo/assets/styles/variables/_layout.scss","webpack:///./src/furo/assets/styles/content/_admonitions.sass","webpack:///./src/furo/assets/styles/content/_api.sass","webpack:///./src/furo/assets/styles/content/_blocks.sass","webpack:///./src/furo/assets/styles/content/_captions.sass","webpack:///./src/furo/assets/styles/content/_code.sass","webpack:///./src/furo/assets/styles/content/_footnotes.sass","webpack:///./src/furo/assets/styles/content/_images.sass","webpack:///./src/furo/assets/styles/content/_indexes.sass","webpack:///./src/furo/assets/styles/content/_lists.sass","webpack:///./src/furo/assets/styles/content/_math.sass","webpack:///./src/furo/assets/styles/content/_misc.sass","webpack:///./src/furo/assets/styles/content/_rubrics.sass","webpack:///./src/furo/assets/styles/content/_sidebar.sass","webpack:///./src/furo/assets/styles/content/_tables.sass","webpack:///./src/furo/assets/styles/content/_target.sass","webpack:///./src/furo/assets/styles/content/_gui-labels.sass","webpack:///./src/furo/assets/styles/components/_footer.sass","webpack:///./src/furo/assets/styles/components/_sidebar.sass","webpack:///./src/furo/assets/styles/components/_table_of_contents.sass","webpack:///./src/furo/assets/styles/_shame.sass"],"sourcesContent":["/*! 
normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */\n\n/* Document\n ========================================================================== */\n\n/**\n * 1. Correct the line height in all browsers.\n * 2. Prevent adjustments of font size after orientation changes in iOS.\n */\n\nhtml {\n line-height: 1.15; /* 1 */\n -webkit-text-size-adjust: 100%; /* 2 */\n}\n\n/* Sections\n ========================================================================== */\n\n/**\n * Remove the margin in all browsers.\n */\n\nbody {\n margin: 0;\n}\n\n/**\n * Render the `main` element consistently in IE.\n */\n\nmain {\n display: block;\n}\n\n/**\n * Correct the font size and margin on `h1` elements within `section` and\n * `article` contexts in Chrome, Firefox, and Safari.\n */\n\nh1 {\n font-size: 2em;\n margin: 0.67em 0;\n}\n\n/* Grouping content\n ========================================================================== */\n\n/**\n * 1. Add the correct box sizing in Firefox.\n * 2. Show the overflow in Edge and IE.\n */\n\nhr {\n box-sizing: content-box; /* 1 */\n height: 0; /* 1 */\n overflow: visible; /* 2 */\n}\n\n/**\n * 1. Correct the inheritance and scaling of font size in all browsers.\n * 2. Correct the odd `em` font sizing in all browsers.\n */\n\npre {\n font-family: monospace, monospace; /* 1 */\n font-size: 1em; /* 2 */\n}\n\n/* Text-level semantics\n ========================================================================== */\n\n/**\n * Remove the gray background on active links in IE 10.\n */\n\na {\n background-color: transparent;\n}\n\n/**\n * 1. Remove the bottom border in Chrome 57-\n * 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari.\n */\n\nabbr[title] {\n border-bottom: none; /* 1 */\n text-decoration: underline; /* 2 */\n text-decoration: underline dotted; /* 2 */\n}\n\n/**\n * Add the correct font weight in Chrome, Edge, and Safari.\n */\n\nb,\nstrong {\n font-weight: bolder;\n}\n\n/**\n * 1. Correct the inheritance and scaling of font size in all browsers.\n * 2. Correct the odd `em` font sizing in all browsers.\n */\n\ncode,\nkbd,\nsamp {\n font-family: monospace, monospace; /* 1 */\n font-size: 1em; /* 2 */\n}\n\n/**\n * Add the correct font size in all browsers.\n */\n\nsmall {\n font-size: 80%;\n}\n\n/**\n * Prevent `sub` and `sup` elements from affecting the line height in\n * all browsers.\n */\n\nsub,\nsup {\n font-size: 75%;\n line-height: 0;\n position: relative;\n vertical-align: baseline;\n}\n\nsub {\n bottom: -0.25em;\n}\n\nsup {\n top: -0.5em;\n}\n\n/* Embedded content\n ========================================================================== */\n\n/**\n * Remove the border on images inside links in IE 10.\n */\n\nimg {\n border-style: none;\n}\n\n/* Forms\n ========================================================================== */\n\n/**\n * 1. Change the font styles in all browsers.\n * 2. Remove the margin in Firefox and Safari.\n */\n\nbutton,\ninput,\noptgroup,\nselect,\ntextarea {\n font-family: inherit; /* 1 */\n font-size: 100%; /* 1 */\n line-height: 1.15; /* 1 */\n margin: 0; /* 2 */\n}\n\n/**\n * Show the overflow in IE.\n * 1. Show the overflow in Edge.\n */\n\nbutton,\ninput { /* 1 */\n overflow: visible;\n}\n\n/**\n * Remove the inheritance of text transform in Edge, Firefox, and IE.\n * 1. 
Remove the inheritance of text transform in Firefox.\n */\n\nbutton,\nselect { /* 1 */\n text-transform: none;\n}\n\n/**\n * Correct the inability to style clickable types in iOS and Safari.\n */\n\nbutton,\n[type=\"button\"],\n[type=\"reset\"],\n[type=\"submit\"] {\n -webkit-appearance: button;\n}\n\n/**\n * Remove the inner border and padding in Firefox.\n */\n\nbutton::-moz-focus-inner,\n[type=\"button\"]::-moz-focus-inner,\n[type=\"reset\"]::-moz-focus-inner,\n[type=\"submit\"]::-moz-focus-inner {\n border-style: none;\n padding: 0;\n}\n\n/**\n * Restore the focus styles unset by the previous rule.\n */\n\nbutton:-moz-focusring,\n[type=\"button\"]:-moz-focusring,\n[type=\"reset\"]:-moz-focusring,\n[type=\"submit\"]:-moz-focusring {\n outline: 1px dotted ButtonText;\n}\n\n/**\n * Correct the padding in Firefox.\n */\n\nfieldset {\n padding: 0.35em 0.75em 0.625em;\n}\n\n/**\n * 1. Correct the text wrapping in Edge and IE.\n * 2. Correct the color inheritance from `fieldset` elements in IE.\n * 3. Remove the padding so developers are not caught out when they zero out\n * `fieldset` elements in all browsers.\n */\n\nlegend {\n box-sizing: border-box; /* 1 */\n color: inherit; /* 2 */\n display: table; /* 1 */\n max-width: 100%; /* 1 */\n padding: 0; /* 3 */\n white-space: normal; /* 1 */\n}\n\n/**\n * Add the correct vertical alignment in Chrome, Firefox, and Opera.\n */\n\nprogress {\n vertical-align: baseline;\n}\n\n/**\n * Remove the default vertical scrollbar in IE 10+.\n */\n\ntextarea {\n overflow: auto;\n}\n\n/**\n * 1. Add the correct box sizing in IE 10.\n * 2. Remove the padding in IE 10.\n */\n\n[type=\"checkbox\"],\n[type=\"radio\"] {\n box-sizing: border-box; /* 1 */\n padding: 0; /* 2 */\n}\n\n/**\n * Correct the cursor style of increment and decrement buttons in Chrome.\n */\n\n[type=\"number\"]::-webkit-inner-spin-button,\n[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n/**\n * 1. Correct the odd appearance in Chrome and Safari.\n * 2. Correct the outline style in Safari.\n */\n\n[type=\"search\"] {\n -webkit-appearance: textfield; /* 1 */\n outline-offset: -2px; /* 2 */\n}\n\n/**\n * Remove the inner padding in Chrome and Safari on macOS.\n */\n\n[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n/**\n * 1. Correct the inability to style clickable types in iOS and Safari.\n * 2. 
Change font properties to `inherit` in Safari.\n */\n\n::-webkit-file-upload-button {\n -webkit-appearance: button; /* 1 */\n font: inherit; /* 2 */\n}\n\n/* Interactive\n ========================================================================== */\n\n/*\n * Add the correct display in Edge, IE 10+, and Firefox.\n */\n\ndetails {\n display: block;\n}\n\n/*\n * Add the correct display in all browsers.\n */\n\nsummary {\n display: list-item;\n}\n\n/* Misc\n ========================================================================== */\n\n/**\n * Add the correct display in IE 10+.\n */\n\ntemplate {\n display: none;\n}\n\n/**\n * Add the correct display in IE 10.\n */\n\n[hidden] {\n display: none;\n}\n","// This file contains styles for managing print media.\n\n////////////////////////////////////////////////////////////////////////////////\n// Hide elements not relevant to print media.\n////////////////////////////////////////////////////////////////////////////////\n@media print\n // Hide icon container.\n .content-icon-container\n display: none !important\n\n // Hide showing header links if hovering over when printing.\n .headerlink\n display: none !important\n\n // Hide mobile header.\n .mobile-header\n display: none !important\n\n // Hide navigation links.\n .related-pages\n display: none !important\n\n////////////////////////////////////////////////////////////////////////////////\n// Tweaks related to decolorization.\n////////////////////////////////////////////////////////////////////////////////\n@media print\n // Apply a border around code which no longer have a color background.\n .highlight\n border: 0.1pt solid var(--color-foreground-border)\n\n////////////////////////////////////////////////////////////////////////////////\n// Avoid page break in some relevant cases.\n////////////////////////////////////////////////////////////////////////////////\n@media print\n ul, ol, dl, a, table, pre, blockquote, p\n page-break-inside: avoid\n\n h1, h2, h3, h4, h5, h6, img, figure, caption\n page-break-inside: avoid\n page-break-after: avoid\n\n ul, ol, dl\n page-break-before: avoid\n",".visually-hidden\n position: absolute !important\n width: 1px !important\n height: 1px !important\n padding: 0 !important\n margin: -1px !important\n overflow: hidden !important\n clip: rect(0,0,0,0) !important\n white-space: nowrap !important\n border: 0 !important\n color: var(--color-foreground-primary)\n background: var(--color-background-primary)\n\n:-moz-focusring\n outline: auto\n","// This file serves as the \"skeleton\" of the theming logic.\n//\n// This contains the bulk of the logic for handling dark mode, color scheme\n// toggling and the handling of color-scheme-specific hiding of elements.\n\nbody\n @include fonts\n @include spacing\n @include icons\n @include admonitions\n @include default-admonition(#651fff, \"abstract\")\n @include default-topic(#14B8A6, \"pencil\")\n\n @include colors\n\n.only-light\n display: block !important\nhtml body .only-dark\n display: none !important\n\n// Ignore dark-mode hints if print media.\n@media not print\n // Enable dark-mode, if requested.\n body[data-theme=\"dark\"]\n @include colors-dark\n\n html & .only-light\n display: none !important\n .only-dark\n display: block !important\n\n // Enable dark mode, unless explicitly told to avoid.\n @media (prefers-color-scheme: dark)\n body:not([data-theme=\"light\"])\n @include colors-dark\n\n html & .only-light\n display: none !important\n .only-dark\n display: block !important\n\n//\n// Theme toggle 
presentation\n//\nbody[data-theme=\"auto\"]\n .theme-toggle svg.theme-icon-when-auto-light\n display: block\n\n @media (prefers-color-scheme: dark)\n .theme-toggle svg.theme-icon-when-auto-dark\n display: block\n .theme-toggle svg.theme-icon-when-auto-light\n display: none\n\nbody[data-theme=\"dark\"]\n .theme-toggle svg.theme-icon-when-dark\n display: block\n\nbody[data-theme=\"light\"]\n .theme-toggle svg.theme-icon-when-light\n display: block\n","// Fonts used by this theme.\n//\n// There are basically two things here -- using the system font stack and\n// defining sizes for various elements in %ages. We could have also used `em`\n// but %age is easier to reason about for me.\n\n@mixin fonts {\n // These are adapted from https://systemfontstack.com/\n --font-stack: -apple-system, BlinkMacSystemFont, Segoe UI, Helvetica, Arial,\n sans-serif, Apple Color Emoji, Segoe UI Emoji;\n --font-stack--monospace: \"SFMono-Regular\", Menlo, Consolas, Monaco,\n Liberation Mono, Lucida Console, monospace;\n --font-stack--headings: var(--font-stack);\n\n --font-size--normal: 100%;\n --font-size--small: 87.5%;\n --font-size--small--2: 81.25%;\n --font-size--small--3: 75%;\n --font-size--small--4: 62.5%;\n\n // Sidebar\n --sidebar-caption-font-size: var(--font-size--small--2);\n --sidebar-item-font-size: var(--font-size--small);\n --sidebar-search-input-font-size: var(--font-size--small);\n\n // Table of Contents\n --toc-font-size: var(--font-size--small--3);\n --toc-font-size--mobile: var(--font-size--normal);\n --toc-title-font-size: var(--font-size--small--4);\n\n // Admonitions\n //\n // These aren't defined in terms of %ages, since nesting these is permitted.\n --admonition-font-size: 0.8125rem;\n --admonition-title-font-size: 0.8125rem;\n\n // Code\n --code-font-size: var(--font-size--small--2);\n\n // API\n --api-font-size: var(--font-size--small);\n}\n","// Spacing for various elements on the page\n//\n// If the user wants to tweak things in a certain way, they are permitted to.\n// They also have to deal with the consequences though!\n\n@mixin spacing {\n // Header!\n --header-height: calc(\n var(--sidebar-item-line-height) + 4 * #{var(--sidebar-item-spacing-vertical)}\n );\n --header-padding: 0.5rem;\n\n // Sidebar\n --sidebar-tree-space-above: 1.5rem;\n --sidebar-caption-space-above: 1rem;\n\n --sidebar-item-line-height: 1rem;\n --sidebar-item-spacing-vertical: 0.5rem;\n --sidebar-item-spacing-horizontal: 1rem;\n --sidebar-item-height: calc(\n var(--sidebar-item-line-height) + 2 *#{var(--sidebar-item-spacing-vertical)}\n );\n\n --sidebar-expander-width: var(--sidebar-item-height); // be square\n\n --sidebar-search-space-above: 0.5rem;\n --sidebar-search-input-spacing-vertical: 0.5rem;\n --sidebar-search-input-spacing-horizontal: 0.5rem;\n --sidebar-search-input-height: 1rem;\n --sidebar-search-icon-size: var(--sidebar-search-input-height);\n\n // Table of Contents\n --toc-title-padding: 0.25rem 0;\n --toc-spacing-vertical: 1.5rem;\n --toc-spacing-horizontal: 1.5rem;\n --toc-item-spacing-vertical: 0.4rem;\n --toc-item-spacing-horizontal: 1rem;\n}\n","// Expose theme icons as CSS variables.\n\n$icons: (\n // Adapted from tabler-icons\n // url: https://tablericons.com/\n \"search\":\n url('data:image/svg+xml;charset=utf-8,'),\n // Factored out from mkdocs-material on 24-Aug-2020.\n // url: https://squidfunk.github.io/mkdocs-material/reference/admonitions/\n \"pencil\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"abstract\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"info\":\n 
url('data:image/svg+xml;charset=utf-8,'),\n \"flame\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"question\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"warning\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"failure\":\n url('data:image/svg+xml;charset=utf-8,'),\n \"spark\":\n url('data:image/svg+xml;charset=utf-8,')\n);\n\n@mixin icons {\n @each $name, $glyph in $icons {\n --icon-#{$name}: #{$glyph};\n }\n}\n","// Admonitions\n\n// Structure of these is:\n// admonition-class: color \"icon-name\";\n//\n// The colors are translated into CSS variables below. The icons are\n// used directly in the main declarations to set the `mask-image` in\n// the title.\n\n// prettier-ignore\n$admonitions: (\n // Each of these has an reST directives for it.\n \"caution\": #ff9100 \"spark\",\n \"warning\": #ff9100 \"warning\",\n \"danger\": #ff5252 \"spark\",\n \"attention\": #ff5252 \"warning\",\n \"error\": #ff5252 \"failure\",\n \"hint\": #00c852 \"question\",\n \"tip\": #00c852 \"info\",\n \"important\": #00bfa5 \"flame\",\n \"note\": #00b0ff \"pencil\",\n \"seealso\": #448aff \"info\",\n \"admonition-todo\": #808080 \"pencil\"\n);\n\n@mixin default-admonition($color, $icon-name) {\n --color-admonition-title: #{$color};\n --color-admonition-title-background: #{rgba($color, 0.2)};\n\n --icon-admonition-default: var(--icon-#{$icon-name});\n}\n\n@mixin default-topic($color, $icon-name) {\n --color-topic-title: #{$color};\n --color-topic-title-background: #{rgba($color, 0.2)};\n\n --icon-topic-default: var(--icon-#{$icon-name});\n}\n\n@mixin admonitions {\n @each $name, $values in $admonitions {\n --color-admonition-title--#{$name}: #{nth($values, 1)};\n --color-admonition-title-background--#{$name}: #{rgba(\n nth($values, 1),\n 0.2\n )};\n }\n}\n","// Colors used throughout this theme.\n//\n// The aim is to give the user more control. 
Thus, instead of hard-coding colors\n// in various parts of the stylesheet, the approach taken is to define all\n// colors as CSS variables and reusing them in all the places.\n//\n// `colors-dark` depends on `colors` being included at a lower specificity.\n\n@mixin colors {\n --color-problematic: #b30000;\n\n // Base Colors\n --color-foreground-primary: black; // for main text and headings\n --color-foreground-secondary: #5a5c63; // for secondary text\n --color-foreground-muted: #6b6f76; // for muted text\n --color-foreground-border: #878787; // for content borders\n\n --color-background-primary: white; // for content\n --color-background-secondary: #f8f9fb; // for navigation + ToC\n --color-background-hover: #efeff4ff; // for navigation-item hover\n --color-background-hover--transparent: #efeff400;\n --color-background-border: #eeebee; // for UI borders\n --color-background-item: #ccc; // for \"background\" items (eg: copybutton)\n\n // Announcements\n --color-announcement-background: #000000dd;\n --color-announcement-text: #eeebee;\n\n // Brand colors\n --color-brand-primary: #0a4bff;\n --color-brand-content: #2757dd;\n --color-brand-visited: #872ee0;\n\n // API documentation\n --color-api-background: var(--color-background-hover--transparent);\n --color-api-background-hover: var(--color-background-hover);\n --color-api-overall: var(--color-foreground-secondary);\n --color-api-name: var(--color-problematic);\n --color-api-pre-name: var(--color-problematic);\n --color-api-paren: var(--color-foreground-secondary);\n --color-api-keyword: var(--color-foreground-primary);\n\n --color-api-added: #21632c;\n --color-api-added-border: #38a84d;\n --color-api-changed: #046172;\n --color-api-changed-border: #06a1bc;\n --color-api-deprecated: #605706;\n --color-api-deprecated-border: #f0d90f;\n --color-api-removed: #b30000;\n --color-api-removed-border: #ff5c5c;\n\n --color-highlight-on-target: #ffffcc;\n\n // Inline code background\n --color-inline-code-background: var(--color-background-secondary);\n\n // Highlighted text (search)\n --color-highlighted-background: #ddeeff;\n --color-highlighted-text: var(--color-foreground-primary);\n\n // GUI Labels\n --color-guilabel-background: #ddeeff80;\n --color-guilabel-border: #bedaf580;\n --color-guilabel-text: var(--color-foreground-primary);\n\n // Admonitions!\n --color-admonition-background: transparent;\n\n //////////////////////////////////////////////////////////////////////////////\n // Everything below this should be one of:\n // - var(...)\n // - *-gradient(...)\n // - special literal values (eg: transparent, none)\n //////////////////////////////////////////////////////////////////////////////\n\n // Tables\n --color-table-header-background: var(--color-background-secondary);\n --color-table-border: var(--color-background-border);\n\n // Cards\n --color-card-border: var(--color-background-secondary);\n --color-card-background: transparent;\n --color-card-marginals-background: var(--color-background-secondary);\n\n // Header\n --color-header-background: var(--color-background-primary);\n --color-header-border: var(--color-background-border);\n --color-header-text: var(--color-foreground-primary);\n\n // Sidebar (left)\n --color-sidebar-background: var(--color-background-secondary);\n --color-sidebar-background-border: var(--color-background-border);\n\n --color-sidebar-brand-text: var(--color-foreground-primary);\n --color-sidebar-caption-text: var(--color-foreground-muted);\n --color-sidebar-link-text: var(--color-foreground-secondary);\n 
--color-sidebar-link-text--top-level: var(--color-brand-primary);\n\n --color-sidebar-item-background: var(--color-sidebar-background);\n --color-sidebar-item-background--current: var(\n --color-sidebar-item-background\n );\n --color-sidebar-item-background--hover: linear-gradient(\n 90deg,\n var(--color-background-hover--transparent) 0%,\n var(--color-background-hover) var(--sidebar-item-spacing-horizontal),\n var(--color-background-hover) 100%\n );\n\n --color-sidebar-item-expander-background: transparent;\n --color-sidebar-item-expander-background--hover: var(\n --color-background-hover\n );\n\n --color-sidebar-search-text: var(--color-foreground-primary);\n --color-sidebar-search-background: var(--color-background-secondary);\n --color-sidebar-search-background--focus: var(--color-background-primary);\n --color-sidebar-search-border: var(--color-background-border);\n --color-sidebar-search-icon: var(--color-foreground-muted);\n\n // Table of Contents (right)\n --color-toc-background: var(--color-background-primary);\n --color-toc-title-text: var(--color-foreground-muted);\n --color-toc-item-text: var(--color-foreground-secondary);\n --color-toc-item-text--hover: var(--color-foreground-primary);\n --color-toc-item-text--active: var(--color-brand-primary);\n\n // Actual page contents\n --color-content-foreground: var(--color-foreground-primary);\n --color-content-background: transparent;\n\n // Links\n --color-link: var(--color-brand-content);\n --color-link-underline: var(--color-background-border);\n --color-link--hover: var(--color-brand-content);\n --color-link-underline--hover: var(--color-foreground-border);\n\n --color-link--visited: var(--color-brand-visited);\n --color-link-underline--visited: var(--color-background-border);\n --color-link--visited--hover: var(--color-brand-visited);\n --color-link-underline--visited--hover: var(--color-foreground-border);\n}\n\n@mixin colors-dark {\n --color-problematic: #ee5151;\n\n // Base Colors\n --color-foreground-primary: #cfd0d0; // for main text and headings\n --color-foreground-secondary: #9ca0a5; // for secondary text\n --color-foreground-muted: #81868d; // for muted text\n --color-foreground-border: #666666; // for content borders\n\n --color-background-primary: #131416; // for content\n --color-background-secondary: #1a1c1e; // for navigation + ToC\n --color-background-hover: #1e2124ff; // for navigation-item hover\n --color-background-hover--transparent: #1e212400;\n --color-background-border: #303335; // for UI borders\n --color-background-item: #444; // for \"background\" items (eg: copybutton)\n\n // Announcements\n --color-announcement-background: #000000dd;\n --color-announcement-text: #eeebee;\n\n // Brand colors\n --color-brand-primary: #3d94ff;\n --color-brand-content: #5ca5ff;\n --color-brand-visited: #b27aeb;\n\n // Highlighted text (search)\n --color-highlighted-background: #083563;\n\n // GUI Labels\n --color-guilabel-background: #08356380;\n --color-guilabel-border: #13395f80;\n\n // API documentation\n --color-api-keyword: var(--color-foreground-secondary);\n --color-highlight-on-target: #333300;\n\n --color-api-added: #3db854;\n --color-api-added-border: #267334;\n --color-api-changed: #09b0ce;\n --color-api-changed-border: #056d80;\n --color-api-deprecated: #b1a10b;\n --color-api-deprecated-border: #6e6407;\n --color-api-removed: #ff7575;\n --color-api-removed-border: #b03b3b;\n\n // Admonitions\n --color-admonition-background: #18181a;\n\n // Cards\n --color-card-border: var(--color-background-secondary);\n 
--color-card-background: #18181a;\n --color-card-marginals-background: var(--color-background-hover);\n}\n","// This file contains the styling for making the content throughout the page,\n// including fonts, paragraphs, headings and spacing among these elements.\n\nbody\n font-family: var(--font-stack)\npre,\ncode,\nkbd,\nsamp\n font-family: var(--font-stack--monospace)\n\n// Make fonts look slightly nicer.\nbody\n -webkit-font-smoothing: antialiased\n -moz-osx-font-smoothing: grayscale\n\n// Line height from Bootstrap 4.1\narticle\n line-height: 1.5\n\n//\n// Headings\n//\nh1,\nh2,\nh3,\nh4,\nh5,\nh6\n line-height: 1.25\n font-family: var(--font-stack--headings)\n font-weight: bold\n\n border-radius: 0.5rem\n margin-top: 0.5rem\n margin-bottom: 0.5rem\n margin-left: -0.5rem\n margin-right: -0.5rem\n padding-left: 0.5rem\n padding-right: 0.5rem\n\n + p\n margin-top: 0\n\nh1\n font-size: 2.5em\n margin-top: 1.75rem\n margin-bottom: 1rem\nh2\n font-size: 2em\n margin-top: 1.75rem\nh3\n font-size: 1.5em\nh4\n font-size: 1.25em\nh5\n font-size: 1.125em\nh6\n font-size: 1em\n\nsmall\n opacity: 75%\n font-size: 80%\n\n// Paragraph\np\n margin-top: 0.5rem\n margin-bottom: 0.75rem\n\n// Horizontal rules\nhr.docutils\n height: 1px\n padding: 0\n margin: 2rem 0\n background-color: var(--color-background-border)\n border: 0\n\n.centered\n text-align: center\n\n// Links\na\n text-decoration: underline\n\n color: var(--color-link)\n text-decoration-color: var(--color-link-underline)\n\n &:visited\n color: var(--color-link--visited)\n text-decoration-color: var(--color-link-underline--visited)\n &:hover\n color: var(--color-link--visited--hover)\n text-decoration-color: var(--color-link-underline--visited--hover)\n\n &:hover\n color: var(--color-link--hover)\n text-decoration-color: var(--color-link-underline--hover)\n &.muted-link\n color: inherit\n &:hover\n color: var(--color-link--hover)\n text-decoration-color: var(--color-link-underline--hover)\n &:visited\n color: var(--color-link--visited--hover)\n text-decoration-color: var(--color-link-underline--visited--hover)\n","// This file contains the styles for the overall layouting of the documentation\n// skeleton, including the responsive changes as well as sidebar toggles.\n//\n// This is implemented as a mobile-last design, which isn't ideal, but it is\n// reasonably good-enough and I got pretty tired by the time I'd finished this\n// to move the rules around to fix this. Shouldn't take more than 3-4 hours,\n// if you know what you're doing tho.\n\n// HACK: Not all browsers account for the scrollbar width in media queries.\n// This results in horizontal scrollbars in the breakpoint where we go\n// from displaying everything to hiding the ToC. 
We accomodate for this by\n// adding a bit of padding to the TOC drawer, disabling the horizontal\n// scrollbar and allowing the scrollbars to cover the padding.\n// https://www.456bereastreet.com/archive/201301/media_query_width_and_vertical_scrollbars/\n\n// HACK: Always having the scrollbar visible, prevents certain browsers from\n// causing the content to stutter horizontally between taller-than-viewport and\n// not-taller-than-viewport pages.\n\nhtml\n overflow-x: hidden\n overflow-y: scroll\n scroll-behavior: smooth\n\n.sidebar-scroll, .toc-scroll, article[role=main] *\n // Override Firefox scrollbar style\n scrollbar-width: thin\n scrollbar-color: var(--color-foreground-border) transparent\n\n // Override Chrome scrollbar styles\n &::-webkit-scrollbar\n width: 0.25rem\n height: 0.25rem\n &::-webkit-scrollbar-thumb\n background-color: var(--color-foreground-border)\n border-radius: 0.125rem\n\n//\n// Overalls\n//\nhtml,\nbody\n height: 100%\n color: var(--color-foreground-primary)\n background: var(--color-background-primary)\n\n.skip-to-content\n position: fixed\n padding: 1rem\n border-radius: 1rem\n left: 0.25rem\n top: 0.25rem\n z-index: 40\n background: var(--color-background-primary)\n color: var(--color-foreground-primary)\n\n transform: translateY(-200%)\n transition: transform 300ms ease-in-out\n\n &:focus-within\n transform: translateY(0%)\n\narticle\n color: var(--color-content-foreground)\n background: var(--color-content-background)\n overflow-wrap: break-word\n\n.page\n display: flex\n // fill the viewport for pages with little content.\n min-height: 100%\n\n.mobile-header\n width: 100%\n height: var(--header-height)\n background-color: var(--color-header-background)\n color: var(--color-header-text)\n border-bottom: 1px solid var(--color-header-border)\n\n // Looks like sub-script/super-script have this, and we need this to\n // be \"on top\" of those.\n z-index: 10\n\n // We don't show the header on large screens.\n display: none\n\n // Add shadow when scrolled\n &.scrolled\n border-bottom: none\n box-shadow: 0 0 0.2rem rgba(0, 0, 0, 0.1), 0 0.2rem 0.4rem rgba(0, 0, 0, 0.2)\n\n .header-center\n a\n color: var(--color-header-text)\n text-decoration: none\n\n.main\n display: flex\n flex: 1\n\n// Sidebar (left) also covers the entire left portion of screen.\n.sidebar-drawer\n box-sizing: border-box\n\n border-right: 1px solid var(--color-sidebar-background-border)\n background: var(--color-sidebar-background)\n\n display: flex\n justify-content: flex-end\n // These next two lines took me two days to figure out.\n width: calc((100% - #{$full-width}) / 2 + #{$sidebar-width})\n min-width: $sidebar-width\n\n// Scroll-along sidebars\n.sidebar-container,\n.toc-drawer\n box-sizing: border-box\n width: $sidebar-width\n\n.toc-drawer\n background: var(--color-toc-background)\n // See HACK described on top of this document\n padding-right: 1rem\n\n.sidebar-sticky,\n.toc-sticky\n position: sticky\n top: 0\n height: min(100%, 100vh)\n height: 100vh\n\n display: flex\n flex-direction: column\n\n.sidebar-scroll,\n.toc-scroll\n flex-grow: 1\n flex-shrink: 1\n\n overflow: auto\n scroll-behavior: smooth\n\n// Central items.\n.content\n padding: 0 $content-padding\n width: $content-width\n\n display: flex\n flex-direction: column\n justify-content: space-between\n\n.icon\n display: inline-block\n height: 1rem\n width: 1rem\n svg\n width: 100%\n height: 100%\n\n//\n// Accommodate announcement banner\n//\n.announcement\n background-color: var(--color-announcement-background)\n color: 
var(--color-announcement-text)\n\n height: var(--header-height)\n display: flex\n align-items: center\n overflow-x: auto\n & + .page\n min-height: calc(100% - var(--header-height))\n\n.announcement-content\n box-sizing: border-box\n padding: 0.5rem\n min-width: 100%\n white-space: nowrap\n text-align: center\n\n a\n color: var(--color-announcement-text)\n text-decoration-color: var(--color-announcement-text)\n\n &:hover\n color: var(--color-announcement-text)\n text-decoration-color: var(--color-link--hover)\n\n////////////////////////////////////////////////////////////////////////////////\n// Toggles for theme\n////////////////////////////////////////////////////////////////////////////////\n.no-js .theme-toggle-container // don't show theme toggle if there's no JS\n display: none\n\n.theme-toggle-container\n display: flex\n\n.theme-toggle\n display: flex\n cursor: pointer\n border: none\n padding: 0\n background: transparent\n\n.theme-toggle svg\n height: 1.25rem\n width: 1.25rem\n color: var(--color-foreground-primary)\n display: none\n\n.theme-toggle-header\n display: flex\n align-items: center\n justify-content: center\n\n////////////////////////////////////////////////////////////////////////////////\n// Toggles for elements\n////////////////////////////////////////////////////////////////////////////////\n.toc-overlay-icon, .nav-overlay-icon\n display: none\n cursor: pointer\n\n .icon\n color: var(--color-foreground-secondary)\n height: 1.5rem\n width: 1.5rem\n\n.toc-header-icon, .nav-overlay-icon\n // for when we set display: flex\n justify-content: center\n align-items: center\n\n.toc-content-icon\n height: 1.5rem\n width: 1.5rem\n\n.content-icon-container\n float: right\n display: flex\n margin-top: 1.5rem\n margin-left: 1rem\n margin-bottom: 1rem\n gap: 0.5rem\n\n .edit-this-page, .view-this-page\n svg\n color: inherit\n height: 1.25rem\n width: 1.25rem\n\n.sidebar-toggle\n position: absolute\n display: none\n// \n.sidebar-toggle[name=\"__toc\"]\n left: 20px\n.sidebar-toggle:checked\n left: 40px\n// \n\n.overlay\n position: fixed\n top: 0\n width: 0\n height: 0\n\n transition: width 0ms, height 0ms, opacity 250ms ease-out\n\n opacity: 0\n background-color: rgba(0, 0, 0, 0.54)\n.sidebar-overlay\n z-index: 20\n.toc-overlay\n z-index: 40\n\n// Keep things on top and smooth.\n.sidebar-drawer\n z-index: 30\n transition: left 250ms ease-in-out\n.toc-drawer\n z-index: 50\n transition: right 250ms ease-in-out\n\n// Show the Sidebar\n#__navigation:checked\n & ~ .sidebar-overlay\n width: 100%\n height: 100%\n opacity: 1\n & ~ .page\n .sidebar-drawer\n top: 0\n left: 0\n // Show the toc sidebar\n#__toc:checked\n & ~ .toc-overlay\n width: 100%\n height: 100%\n opacity: 1\n & ~ .page\n .toc-drawer\n top: 0\n right: 0\n\n////////////////////////////////////////////////////////////////////////////////\n// Back to top\n////////////////////////////////////////////////////////////////////////////////\n.back-to-top\n text-decoration: none\n\n display: none\n position: fixed\n left: 0\n top: 1rem\n padding: 0.5rem\n padding-right: 0.75rem\n border-radius: 1rem\n font-size: 0.8125rem\n\n background: var(--color-background-primary)\n box-shadow: 0 0.2rem 0.5rem rgba(0, 0, 0, 0.05), #6b728080 0px 0px 1px 0px\n\n z-index: 10\n\n margin-left: 50%\n transform: translateX(-50%)\n svg\n height: 1rem\n width: 1rem\n fill: currentColor\n display: inline-block\n\n span\n margin-left: 0.25rem\n\n .show-back-to-top &\n display: flex\n align-items: 
center\n\n////////////////////////////////////////////////////////////////////////////////\n// Responsive layouting\n////////////////////////////////////////////////////////////////////////////////\n// Make things a bit bigger on bigger screens.\n@media (min-width: $full-width + $sidebar-width)\n html\n font-size: 110%\n\n@media (max-width: $full-width)\n // Collapse \"toc\" into the icon.\n .toc-content-icon\n display: flex\n .toc-drawer\n position: fixed\n height: 100vh\n top: 0\n right: -$sidebar-width\n border-left: 1px solid var(--color-background-muted)\n .toc-tree\n border-left: none\n font-size: var(--toc-font-size--mobile)\n\n // Accomodate for a changed content width.\n .sidebar-drawer\n width: calc((100% - #{$full-width - $sidebar-width}) / 2 + #{$sidebar-width})\n\n@media (max-width: $content-padded-width + $sidebar-width)\n // Center the page\n .content\n margin-left: auto\n margin-right: auto\n padding: 0 $content-padding--small\n\n@media (max-width: $content-padded-width--small + $sidebar-width)\n // Collapse \"navigation\".\n .nav-overlay-icon\n display: flex\n .sidebar-drawer\n position: fixed\n height: 100vh\n width: $sidebar-width\n\n top: 0\n left: -$sidebar-width\n\n // Swap which icon is visible.\n .toc-header-icon, .theme-toggle-header\n display: flex\n .toc-content-icon, .theme-toggle-content\n display: none\n\n // Show the header.\n .mobile-header\n position: sticky\n top: 0\n display: flex\n justify-content: space-between\n align-items: center\n\n .header-left,\n .header-right\n display: flex\n height: var(--header-height)\n padding: 0 var(--header-padding)\n label\n height: 100%\n width: 100%\n user-select: none\n\n .nav-overlay-icon .icon,\n .theme-toggle svg\n height: 1.5rem\n width: 1.5rem\n\n // Add a scroll margin for the content\n :target\n scroll-margin-top: calc(var(--header-height) + 2.5rem)\n\n // Show back-to-top below the header\n .back-to-top\n top: calc(var(--header-height) + 0.5rem)\n\n // Accommodate for the header.\n .page\n flex-direction: column\n justify-content: center\n\n@media (max-width: $content-width + 2* $content-padding--small)\n // Content should respect window limits.\n .content\n width: 100%\n overflow-x: auto\n\n@media (max-width: $content-width)\n article[role=main] aside.sidebar\n float: none\n width: 100%\n margin: 1rem 0\n","// Overall Layout Variables\n//\n// Because CSS variables can't be used in media queries. 
The fact that this\n// makes the layout non-user-configurable is a good thing.\n$content-padding: 3em;\n$content-padding--small: 1em;\n$content-width: 46em;\n$sidebar-width: 15em;\n$content-padded-width: $content-width + 2 * $content-padding;\n$content-padded-width--small: $content-width + 2 * $content-padding--small;\n$full-width: $content-padded-width + 2 * $sidebar-width;\n","//\n// The design here is strongly inspired by mkdocs-material.\n.admonition, .topic\n margin: 1rem auto\n padding: 0 0.5rem 0.5rem 0.5rem\n\n background: var(--color-admonition-background)\n\n border-radius: 0.2rem\n box-shadow: 0 0.2rem 0.5rem rgba(0, 0, 0, 0.05), 0 0 0.0625rem rgba(0, 0, 0, 0.1)\n\n font-size: var(--admonition-font-size)\n\n overflow: hidden\n page-break-inside: avoid\n\n // First element should have no margin, since the title has it.\n > :nth-child(2)\n margin-top: 0\n\n // Last item should have no margin, since we'll control that w/ padding\n > :last-child\n margin-bottom: 0\n\n.admonition p.admonition-title,\np.topic-title\n position: relative\n margin: 0 -0.5rem 0.5rem\n padding-left: 2rem\n padding-right: .5rem\n padding-top: .4rem\n padding-bottom: .4rem\n\n font-weight: 500\n font-size: var(--admonition-title-font-size)\n line-height: 1.3\n\n // Our fancy icon\n &::before\n content: \"\"\n position: absolute\n left: 0.5rem\n width: 1rem\n height: 1rem\n\n// Default styles\np.admonition-title\n background-color: var(--color-admonition-title-background)\n &::before\n background-color: var(--color-admonition-title)\n mask-image: var(--icon-admonition-default)\n mask-repeat: no-repeat\n\np.topic-title\n background-color: var(--color-topic-title-background)\n &::before\n background-color: var(--color-topic-title)\n mask-image: var(--icon-topic-default)\n mask-repeat: no-repeat\n\n//\n// Variants\n//\n.admonition\n border-left: 0.2rem solid var(--color-admonition-title)\n\n @each $type, $value in $admonitions\n &.#{$type}\n border-left-color: var(--color-admonition-title--#{$type})\n > .admonition-title\n background-color: var(--color-admonition-title-background--#{$type})\n &::before\n background-color: var(--color-admonition-title--#{$type})\n mask-image: var(--icon-#{nth($value, 2)})\n\n.admonition-todo > .admonition-title\n text-transform: uppercase\n","// This file stylizes the API documentation (stuff generated by autodoc). 
It's\n// deeply nested due to how autodoc structures the HTML without enough classes\n// to select the relevant items.\n\n// API docs!\ndl[class]:not(.option-list):not(.field-list):not(.footnote):not(.glossary):not(.simple)\n // Tweak the spacing of all the things!\n dd\n margin-left: 2rem\n > :first-child\n margin-top: 0.125rem\n > :last-child\n margin-bottom: 0.75rem\n\n // This is used for the arguments\n .field-list\n margin-bottom: 0.75rem\n\n // \"Headings\" (like \"Parameters\" and \"Return\")\n > dt\n text-transform: uppercase\n font-size: var(--font-size--small)\n\n dd:empty\n margin-bottom: 0.5rem\n dd > ul\n margin-left: -1.2rem\n > li\n > p:nth-child(2)\n margin-top: 0\n // When the last-empty-paragraph follows a paragraph, it doesn't need\n // to augument the existing spacing.\n > p + p:last-child:empty\n margin-top: 0\n margin-bottom: 0\n\n // Colorize the elements\n > dt\n color: var(--color-api-overall)\n\n.sig:not(.sig-inline)\n font-weight: bold\n\n font-size: var(--api-font-size)\n font-family: var(--font-stack--monospace)\n\n margin-left: -0.25rem\n margin-right: -0.25rem\n padding-top: 0.25rem\n padding-bottom: 0.25rem\n padding-right: 0.5rem\n\n // These are intentionally em, to properly match the font size.\n padding-left: 3em\n text-indent: -2.5em\n\n border-radius: 0.25rem\n\n background: var(--color-api-background)\n transition: background 100ms ease-out\n\n &:hover\n background: var(--color-api-background-hover)\n\n // adjust the size of the [source] link on the right.\n a.reference\n .viewcode-link\n font-weight: normal\n width: 4.25rem\n\nem.property\n font-style: normal\n &:first-child\n color: var(--color-api-keyword)\n.sig-name\n color: var(--color-api-name)\n.sig-prename\n font-weight: normal\n color: var(--color-api-pre-name)\n.sig-paren\n color: var(--color-api-paren)\n.sig-param\n font-style: normal\n\ndiv.versionadded,\ndiv.versionchanged,\ndiv.deprecated,\ndiv.versionremoved\n border-left: 0.1875rem solid\n border-radius: 0.125rem\n\n padding-left: 0.75rem\n\n p\n margin-top: 0.125rem\n margin-bottom: 0.125rem\n\ndiv.versionadded\n border-color: var(--color-api-added-border)\n .versionmodified\n color: var(--color-api-added)\n\ndiv.versionchanged\n border-color: var(--color-api-changed-border)\n .versionmodified\n color: var(--color-api-changed)\n\ndiv.deprecated\n border-color: var(--color-api-deprecated-border)\n .versionmodified\n color: var(--color-api-deprecated)\n\ndiv.versionremoved\n border-color: var(--color-api-removed-border)\n .versionmodified\n color: var(--color-api-removed)\n\n// Align the [docs] and [source] to the right.\n.viewcode-link, .viewcode-back\n float: right\n text-align: right\n",".line-block\n margin-top: 0.5rem\n margin-bottom: 0.75rem\n .line-block\n margin-top: 0rem\n margin-bottom: 0rem\n padding-left: 1rem\n","// Captions\narticle p.caption,\ntable > caption,\n.code-block-caption\n font-size: var(--font-size--small)\n text-align: center\n\n// Caption above a TOCTree\n.toctree-wrapper.compound\n .caption, :not(.caption) > .caption-text\n font-size: var(--font-size--small)\n text-transform: uppercase\n\n text-align: initial\n margin-bottom: 0\n\n > ul\n margin-top: 0\n margin-bottom: 0\n","// Inline code\ncode.literal, .sig-inline\n background: var(--color-inline-code-background)\n border-radius: 0.2em\n // Make the font smaller, and use padding to recover.\n font-size: var(--font-size--small--2)\n padding: 0.1em 0.2em\n\n pre.literal-block &\n font-size: inherit\n padding: 0\n\n p &\n border: 1px solid 
var(--color-background-border)\n\n.sig-inline\n font-family: var(--font-stack--monospace)\n\n// Code and Literal Blocks\n$code-spacing-vertical: 0.625rem\n$code-spacing-horizontal: 0.875rem\n\n// Wraps every literal block + line numbers.\ndiv[class*=\" highlight-\"],\ndiv[class^=\"highlight-\"]\n margin: 1em 0\n display: flex\n\n .table-wrapper\n margin: 0\n padding: 0\n\npre\n margin: 0\n padding: 0\n overflow: auto\n\n // Needed to have more specificity than pygments' \"pre\" selector. :(\n article[role=\"main\"] .highlight &\n line-height: 1.5\n\n &.literal-block,\n .highlight &\n font-size: var(--code-font-size)\n padding: $code-spacing-vertical $code-spacing-horizontal\n\n // Make it look like all the other blocks.\n &.literal-block\n margin-top: 1rem\n margin-bottom: 1rem\n\n border-radius: 0.2rem\n background-color: var(--color-code-background)\n color: var(--color-code-foreground)\n\n// All code is always contained in this.\n.highlight\n width: 100%\n border-radius: 0.2rem\n\n // Make line numbers and prompts un-selectable.\n .gp, span.linenos\n user-select: none\n pointer-events: none\n\n // Expand the line-highlighting.\n .hll\n display: block\n margin-left: -$code-spacing-horizontal\n margin-right: -$code-spacing-horizontal\n padding-left: $code-spacing-horizontal\n padding-right: $code-spacing-horizontal\n\n/* Make code block captions be nicely integrated */\n.code-block-caption\n display: flex\n padding: $code-spacing-vertical $code-spacing-horizontal\n\n border-radius: 0.25rem\n border-bottom-left-radius: 0\n border-bottom-right-radius: 0\n font-weight: 300\n border-bottom: 1px solid\n\n background-color: var(--color-code-background)\n color: var(--color-code-foreground)\n border-color: var(--color-background-border)\n\n + div[class]\n margin-top: 0\n pre\n border-top-left-radius: 0\n border-top-right-radius: 0\n\n// When `html_codeblock_linenos_style` is table.\n.highlighttable\n width: 100%\n display: block\n tbody\n display: block\n\n tr\n display: flex\n\n // Line numbers\n td.linenos\n background-color: var(--color-code-background)\n color: var(--color-code-foreground)\n padding: $code-spacing-vertical $code-spacing-horizontal\n padding-right: 0\n border-top-left-radius: 0.2rem\n border-bottom-left-radius: 0.2rem\n\n .linenodiv\n padding-right: $code-spacing-horizontal\n font-size: var(--code-font-size)\n box-shadow: -0.0625rem 0 var(--color-foreground-border) inset\n\n // Actual code\n td.code\n padding: 0\n display: block\n flex: 1\n overflow: hidden\n\n .highlight\n border-top-left-radius: 0\n border-bottom-left-radius: 0\n\n// When `html_codeblock_linenos_style` is inline.\n.highlight\n span.linenos\n display: inline-block\n padding-left: 0\n padding-right: $code-spacing-horizontal\n margin-right: $code-spacing-horizontal\n box-shadow: -0.0625rem 0 var(--color-foreground-border) inset\n","// Inline Footnote Reference\n.footnote-reference\n font-size: var(--font-size--small--4)\n vertical-align: super\n\n// Definition list, listing the content of each note.\n// docutils <= 0.17\ndl.footnote.brackets\n font-size: var(--font-size--small)\n color: var(--color-foreground-secondary)\n\n display: grid\n grid-template-columns: max-content auto\n dt\n margin: 0\n > .fn-backref\n margin-left: 0.25rem\n\n &:after\n content: \":\"\n\n .brackets\n &:before\n content: \"[\"\n &:after\n content: \"]\"\n\n dd\n margin: 0\n padding: 0 1rem\n\n// docutils >= 0.18\naside.footnote\n font-size: var(--font-size--small)\n color: var(--color-foreground-secondary)\n\naside.footnote > 
span,\ndiv.citation > span\n float: left\n font-weight: 500\n padding-right: 0.25rem\n\naside.footnote > *:not(span),\ndiv.citation > p\n margin-left: 2rem\n","//\n// Figures\n//\nimg\n box-sizing: border-box\n max-width: 100%\n height: auto\n\narticle\n figure, .figure\n border-radius: 0.2rem\n\n margin: 0\n :last-child\n margin-bottom: 0\n\n .align-left\n float: left\n clear: left\n margin: 0 1rem 1rem\n\n .align-right\n float: right\n clear: right\n margin: 0 1rem 1rem\n\n .align-default,\n .align-center\n display: block\n text-align: center\n margin-left: auto\n margin-right: auto\n\n // WELL, table needs to be stylised like a table.\n table.align-default\n display: table\n text-align: initial\n",".genindex-jumpbox, .domainindex-jumpbox\n border-top: 1px solid var(--color-background-border)\n border-bottom: 1px solid var(--color-background-border)\n padding: 0.25rem\n\n.genindex-section, .domainindex-section\n h2\n margin-top: 0.75rem\n margin-bottom: 0.5rem\n ul\n margin-top: 0\n margin-bottom: 0\n","ul,\nol\n padding-left: 1.2rem\n\n // Space lists out like paragraphs\n margin-top: 1rem\n margin-bottom: 1rem\n // reduce margins within li.\n li\n > p:first-child\n margin-top: 0.25rem\n margin-bottom: 0.25rem\n\n > p:last-child\n margin-top: 0.25rem\n\n > ul,\n > ol\n margin-top: 0.5rem\n margin-bottom: 0.5rem\n\nol\n &.arabic\n list-style: decimal\n &.loweralpha\n list-style: lower-alpha\n &.upperalpha\n list-style: upper-alpha\n &.lowerroman\n list-style: lower-roman\n &.upperroman\n list-style: upper-roman\n\n// Don't space lists out when they're \"simple\" or in a `.. toctree::`\n.simple,\n.toctree-wrapper\n li\n > ul,\n > ol\n margin-top: 0\n margin-bottom: 0\n\n// Definition Lists\n.field-list,\n.option-list,\ndl:not([class]),\ndl.simple,\ndl.footnote,\ndl.glossary\n dt\n font-weight: 500\n margin-top: 0.25rem\n + dt\n margin-top: 0\n\n .classifier::before\n content: \":\"\n margin-left: 0.2rem\n margin-right: 0.2rem\n\n dd\n > p:first-child,\n ul\n margin-top: 0.125rem\n\n ul\n margin-bottom: 0.125rem\n",".math-wrapper\n width: 100%\n overflow-x: auto\n\ndiv.math\n position: relative\n text-align: center\n\n .headerlink,\n &:focus .headerlink\n display: none\n\n &:hover .headerlink\n display: inline-block\n\n span.eqno\n position: absolute\n right: 0.5rem\n top: 50%\n transform: translate(0, -50%)\n z-index: 1\n","// Abbreviations\nabbr[title]\n cursor: help\n\n// \"Problematic\" content, as identified by Sphinx\n.problematic\n color: var(--color-problematic)\n\n// Keyboard / Mouse \"instructions\"\nkbd:not(.compound)\n margin: 0 0.2rem\n padding: 0 0.2rem\n border-radius: 0.2rem\n border: 1px solid var(--color-foreground-border)\n color: var(--color-foreground-primary)\n vertical-align: text-bottom\n\n font-size: var(--font-size--small--3)\n display: inline-block\n\n box-shadow: 0 0.0625rem 0 rgba(0, 0, 0, 0.2), inset 0 0 0 0.125rem var(--color-background-primary)\n\n background-color: var(--color-background-secondary)\n\n// Blockquote\nblockquote\n border-left: 4px solid var(--color-background-border)\n background: var(--color-background-secondary)\n\n margin-left: 0\n margin-right: 0\n padding: 0.5rem 1rem\n\n .attribution\n font-weight: 600\n text-align: right\n\n &.pull-quote,\n &.highlights\n font-size: 1.25em\n\n &.epigraph,\n &.pull-quote\n border-left-width: 0\n border-radius: 0.5rem\n\n &.highlights\n border-left-width: 0\n background: transparent\n\n// Center align embedded-in-text images\np .reference img\n vertical-align: middle\n","p.rubric\n line-height: 1.25\n 
font-weight: bold\n font-size: 1.125em\n\n // For Numpy-style documentation that's got rubrics within it.\n // https://github.com/pradyunsg/furo/discussions/505\n dd &\n line-height: inherit\n font-weight: inherit\n\n font-size: var(--font-size--small)\n text-transform: uppercase\n","article .sidebar\n float: right\n clear: right\n width: 30%\n\n margin-left: 1rem\n margin-right: 0\n\n border-radius: 0.2rem\n background-color: var(--color-background-secondary)\n border: var(--color-background-border) 1px solid\n\n > *\n padding-left: 1rem\n padding-right: 1rem\n\n > ul, > ol // lists need additional padding, because bullets.\n padding-left: 2.2rem\n\n .sidebar-title\n margin: 0\n padding: 0.5rem 1rem\n border-bottom: var(--color-background-border) 1px solid\n\n font-weight: 500\n\n// TODO: subtitle\n// TODO: dedicated variables?\n","[role=main] .table-wrapper.container\n width: 100%\n overflow-x: auto\n margin-top: 1rem\n margin-bottom: 0.5rem\n padding: 0.2rem 0.2rem 0.75rem\n\ntable.docutils\n border-radius: 0.2rem\n border-spacing: 0\n border-collapse: collapse\n\n box-shadow: 0 0.2rem 0.5rem rgba(0, 0, 0, 0.05), 0 0 0.0625rem rgba(0, 0, 0, 0.1)\n\n th\n background: var(--color-table-header-background)\n\n td,\n th\n // Space things out properly\n padding: 0 0.25rem\n\n // Get the borders looking just-right.\n border-left: 1px solid var(--color-table-border)\n border-right: 1px solid var(--color-table-border)\n border-bottom: 1px solid var(--color-table-border)\n\n p\n margin: 0.25rem\n\n &:first-child\n border-left: none\n &:last-child\n border-right: none\n\n // MyST-parser tables set these classes for control of column alignment\n &.text-left\n text-align: left\n &.text-right\n text-align: right\n &.text-center\n text-align: center\n",":target\n scroll-margin-top: 2.5rem\n\n@media (max-width: $full-width - $sidebar-width)\n :target\n scroll-margin-top: calc(2.5rem + var(--header-height))\n\n // When a heading is selected\n section > span:target\n scroll-margin-top: calc(2.8rem + var(--header-height))\n\n// Permalinks\n.headerlink\n font-weight: 100\n user-select: none\n\nh1,\nh2,\nh3,\nh4,\nh5,\nh6,\ndl dt,\np.caption,\nfigcaption p,\ntable > caption,\n.code-block-caption\n > .headerlink\n margin-left: 0.5rem\n visibility: hidden\n &:hover > .headerlink\n visibility: visible\n\n // Don't change to link-like, if someone adds the contents directive.\n > .toc-backref\n color: inherit\n text-decoration-line: none\n\n// Figure and table captions are special.\nfigure:hover > figcaption > p > .headerlink,\ntable:hover > caption > .headerlink\n visibility: visible\n\n:target >, // Regular section[id] style anchors\nspan:target ~ // Non-regular span[id] style \"extra\" anchors\n h1,\n h2,\n h3,\n h4,\n h5,\n h6\n &:nth-of-type(1)\n background-color: var(--color-highlight-on-target)\n // .headerlink\n // visibility: visible\n code.literal\n background-color: transparent\n\ntable:target > caption,\nfigure:target\n background-color: var(--color-highlight-on-target)\n\n// Inline page contents\n.this-will-duplicate-information-and-it-is-still-useful-here li :target\n background-color: var(--color-highlight-on-target)\n\n// Code block permalinks\n.literal-block-wrapper:target .code-block-caption\n background-color: var(--color-highlight-on-target)\n\n// When a definition list item is selected\n//\n// There isn't really an alternative to !important here, due to the\n// high-specificity of API documentation's selector.\ndt:target\n background-color: var(--color-highlight-on-target) !important\n\n// 
When a footnote reference is selected\n.footnote > dt:target + dd,\n.footnote-reference:target\n background-color: var(--color-highlight-on-target)\n",".guilabel\n background-color: var(--color-guilabel-background)\n border: 1px solid var(--color-guilabel-border)\n color: var(--color-guilabel-text)\n\n padding: 0 0.3em\n border-radius: 0.5em\n font-size: 0.9em\n","// This file contains the styles used for stylizing the footer that's shown\n// below the content.\n\nfooter\n font-size: var(--font-size--small)\n display: flex\n flex-direction: column\n\n margin-top: 2rem\n\n// Bottom of page information\n.bottom-of-page\n display: flex\n align-items: center\n justify-content: space-between\n\n margin-top: 1rem\n padding-top: 1rem\n padding-bottom: 1rem\n\n color: var(--color-foreground-secondary)\n border-top: 1px solid var(--color-background-border)\n\n line-height: 1.5\n\n @media (max-width: $content-width)\n text-align: center\n flex-direction: column-reverse\n gap: 0.25rem\n\n .left-details\n font-size: var(--font-size--small)\n\n .right-details\n display: flex\n flex-direction: column\n gap: 0.25rem\n text-align: right\n\n .icons\n display: flex\n justify-content: flex-end\n gap: 0.25rem\n font-size: 1rem\n\n a\n text-decoration: none\n\n svg,\n img\n font-size: 1.125rem\n height: 1em\n width: 1em\n\n// Next/Prev page information\n.related-pages\n a\n display: flex\n align-items: center\n\n text-decoration: none\n &:hover .page-info .title\n text-decoration: underline\n color: var(--color-link)\n text-decoration-color: var(--color-link-underline)\n\n svg.furo-related-icon,\n svg.furo-related-icon > use\n flex-shrink: 0\n\n color: var(--color-foreground-border)\n\n width: 0.75rem\n height: 0.75rem\n margin: 0 0.5rem\n\n &.next-page\n max-width: 50%\n\n float: right\n clear: right\n text-align: right\n\n &.prev-page\n max-width: 50%\n\n float: left\n clear: left\n\n svg\n transform: rotate(180deg)\n\n.page-info\n display: flex\n flex-direction: column\n overflow-wrap: anywhere\n\n .next-page &\n align-items: flex-end\n\n .context\n display: flex\n align-items: center\n\n padding-bottom: 0.1rem\n\n color: var(--color-foreground-muted)\n font-size: var(--font-size--small)\n text-decoration: none\n","// This file contains the styles for the contents of the left sidebar, which\n// contains the navigation tree, logo, search etc.\n\n////////////////////////////////////////////////////////////////////////////////\n// Brand on top of the scrollable tree.\n////////////////////////////////////////////////////////////////////////////////\n.sidebar-brand\n display: flex\n flex-direction: column\n flex-shrink: 0\n\n padding: var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal)\n text-decoration: none\n\n.sidebar-brand-text\n color: var(--color-sidebar-brand-text)\n overflow-wrap: break-word\n margin: var(--sidebar-item-spacing-vertical) 0\n font-size: 1.5rem\n\n.sidebar-logo-container\n margin: var(--sidebar-item-spacing-vertical) 0\n\n.sidebar-logo\n margin: 0 auto\n display: block\n max-width: 100%\n\n////////////////////////////////////////////////////////////////////////////////\n// Search\n////////////////////////////////////////////////////////////////////////////////\n.sidebar-search-container\n display: flex\n align-items: center\n margin-top: var(--sidebar-search-space-above)\n\n position: relative\n\n background: var(--color-sidebar-search-background)\n &:hover,\n &:focus-within\n background: var(--color-sidebar-search-background--focus)\n\n &::before\n content: \"\"\n 
position: absolute\n left: var(--sidebar-item-spacing-horizontal)\n width: var(--sidebar-search-icon-size)\n height: var(--sidebar-search-icon-size)\n\n background-color: var(--color-sidebar-search-icon)\n mask-image: var(--icon-search)\n\n.sidebar-search\n box-sizing: border-box\n\n border: none\n border-top: 1px solid var(--color-sidebar-search-border)\n border-bottom: 1px solid var(--color-sidebar-search-border)\n\n padding-top: var(--sidebar-search-input-spacing-vertical)\n padding-bottom: var(--sidebar-search-input-spacing-vertical)\n padding-right: var(--sidebar-search-input-spacing-horizontal)\n padding-left: calc(var(--sidebar-item-spacing-horizontal) + var(--sidebar-search-input-spacing-horizontal) + var(--sidebar-search-icon-size))\n\n width: 100%\n\n color: var(--color-sidebar-search-foreground)\n background: transparent\n z-index: 10\n\n &:focus\n outline: none\n\n &::placeholder\n font-size: var(--sidebar-search-input-font-size)\n\n//\n// Hide Search Matches link\n//\n#searchbox .highlight-link\n padding: var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal) 0\n margin: 0\n text-align: center\n\n a\n color: var(--color-sidebar-search-icon)\n font-size: var(--font-size--small--2)\n\n////////////////////////////////////////////////////////////////////////////////\n// Structure/Skeleton of the navigation tree (left)\n////////////////////////////////////////////////////////////////////////////////\n.sidebar-tree\n font-size: var(--sidebar-item-font-size)\n margin-top: var(--sidebar-tree-space-above)\n margin-bottom: var(--sidebar-item-spacing-vertical)\n\n ul\n padding: 0\n margin-top: 0\n margin-bottom: 0\n\n display: flex\n flex-direction: column\n\n list-style: none\n\n li\n position: relative\n margin: 0\n\n > ul\n margin-left: var(--sidebar-item-spacing-horizontal)\n\n .icon\n color: var(--color-sidebar-link-text)\n\n .reference\n box-sizing: border-box\n color: var(--color-sidebar-link-text)\n\n // Fill the parent.\n display: inline-block\n line-height: var(--sidebar-item-line-height)\n text-decoration: none\n\n // Don't allow long words to cause wrapping.\n overflow-wrap: anywhere\n\n height: 100%\n width: 100%\n\n padding: var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal)\n\n &:hover\n color: var(--color-sidebar-link-text)\n background: var(--color-sidebar-item-background--hover)\n\n // Add a nice little \"external-link\" arrow here.\n &.external::after\n content: url('data:image/svg+xml,')\n margin: 0 0.25rem\n vertical-align: middle\n color: var(--color-sidebar-link-text)\n\n // Make the current page reference bold.\n .current-page > .reference\n font-weight: bold\n\n label\n position: absolute\n top: 0\n right: 0\n height: var(--sidebar-item-height)\n width: var(--sidebar-expander-width)\n\n cursor: pointer\n user-select: none\n\n display: flex\n justify-content: center\n align-items: center\n\n .caption, :not(.caption) > .caption-text\n font-size: var(--sidebar-caption-font-size)\n color: var(--color-sidebar-caption-text)\n\n font-weight: bold\n text-transform: uppercase\n\n margin: var(--sidebar-caption-space-above) 0 0 0\n padding: var(--sidebar-item-spacing-vertical) var(--sidebar-item-spacing-horizontal)\n\n // If it has children, add a bit more padding to wrap the content to avoid\n // overlapping with the