<!DOCTYPE html> <html lang="en" data-content_root="../../" data-theme="auto"> <head> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1" /> <title>Decoding sensor space data with generalization across time and conditions — MNE 1.10.0.dev78+ge65a55965 documentation</title> <script data-cfasync="false"> document.documentElement.dataset.mode = localStorage.getItem("mode") || "auto"; document.documentElement.dataset.theme = localStorage.getItem("theme") || "auto"; </script> <!-- this gives us a css class that will be invisible only if js is disabled --> <noscript> <style> .pst-js-only { display: none !important; } </style> </noscript> <!-- Loaded before other Sphinx assets --> <link href="../../_static/styles/theme.css?digest=8878045cc6db502f8baf" rel="stylesheet" /> <link href="../../_static/styles/pydata-sphinx-theme.css?digest=8878045cc6db502f8baf" rel="stylesheet" /> <link rel="stylesheet" type="text/css" href="../../_static/pygments.css?v=8f2a1f02" /> <link rel="stylesheet" type="text/css" href="../../_static/graphviz.css?v=4ae1632d" /> <link rel="stylesheet" type="text/css" href="../../_static/plot_directive.css" /> <link rel="stylesheet" type="text/css" href="../../_static/copybutton.css?v=76b2166b" /> <link rel="stylesheet" type="text/css" href="../../_static/sg_gallery.css?v=d2d258e8" /> <link rel="stylesheet" type="text/css" href="../../_static/sg_gallery-binder.css?v=f4aeca0c" /> <link rel="stylesheet" type="text/css" href="../../_static/sg_gallery-dataframe.css?v=2082cf3c" /> <link rel="stylesheet" type="text/css" href="../../_static/sg_gallery-rendered-html.css?v=1277b6f3" /> <link rel="stylesheet" type="text/css" href="../../_static/sphinx-design.min.css?v=95c83b7e" /> <link rel="stylesheet" type="text/css" href="../../_static/style.css?v=1395d0ad" /> <!-- So that users can add custom icons --> <script 
src="../../_static/scripts/fontawesome.js?digest=8878045cc6db502f8baf"></script> <!-- Pre-loaded scripts that we'll load fully later --> <link rel="preload" as="script" href="../../_static/scripts/bootstrap.js?digest=8878045cc6db502f8baf" /> <link rel="preload" as="script" href="../../_static/scripts/pydata-sphinx-theme.js?digest=8878045cc6db502f8baf" /> <script src="../../_static/documentation_options.js?v=6ad8ce81"></script> <script src="../../_static/doctools.js?v=9bcbadda"></script> <script src="../../_static/sphinx_highlight.js?v=dc90522c"></script> <script src="../../_static/clipboard.min.js?v=a7894cd8"></script> <script src="../../_static/copybutton.js?v=fd10adb8"></script> <script src="../../_static/design-tabs.js?v=f930bc37"></script> <script async="async" src="https://www.googletagmanager.com/gtag/js?id=G-5TBCPCRB6X"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){ dataLayer.push(arguments); } gtag('js', new Date()); gtag('config', 'G-5TBCPCRB6X'); </script> <script>DOCUMENTATION_OPTIONS.pagename = 'auto_examples/decoding/decoding_time_generalization_conditions';</script> <script> DOCUMENTATION_OPTIONS.theme_version = '0.16.1'; DOCUMENTATION_OPTIONS.theme_switcher_json_url = 'https://mne.tools/dev/_static/versions.json'; DOCUMENTATION_OPTIONS.theme_switcher_version_match = '1.10'; DOCUMENTATION_OPTIONS.show_version_warning_banner = false; </script> <link rel="icon" href="../../_static/favicon.ico"/> <link rel="index" title="Index" href="../../genindex.html" /> <link rel="search" title="Search" href="../../search.html" /> <link rel="next" title="Analysis of evoked response using ICA and PCA reduction techniques" href="decoding_unsupervised_spatial_filter.html" /> <link rel="prev" title="Continuous Target Decoding with SPoC" href="decoding_spoc_CMC.html" /> <link rel="canonical" href="https://mne.tools/stable/auto_examples/decoding/decoding_time_generalization_conditions.html" />
<meta name="docsearch:language" content="en"/> <meta name="docsearch:version" content="1.10" /> </head> <body data-bs-spy="scroll" data-bs-target=".bd-toc-nav" data-offset="180" data-bs-root-margin="0px 0px -60%" data-default-mode="auto"> <div id="pst-skip-link" class="skip-link d-print-none"><a href="#main-content">Skip to main content</a></div> <div id="pst-scroll-pixel-helper"></div> <dialog id="pst-search-dialog"> <form class="bd-search d-flex align-items-center" action="../../search.html" method="get"> <i class="fa-solid fa-magnifying-glass"></i> <input type="search" class="form-control" name="q" placeholder="Search the docs ..." aria-label="Search the docs ..." autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"/> <span class="search-button__kbd-shortcut"><kbd class="kbd-shortcut__modifier">Ctrl</kbd>+<kbd>K</kbd></span> </form> </dialog> <div class="pst-async-banner-revealer d-none"> <aside id="bd-header-version-warning" class="d-none d-print-none" aria-label="Version warning"></aside> </div> <header class="bd-header navbar navbar-expand-lg bd-navbar d-print-none"> <div class="bd-header__inner bd-page-width"> <button class="pst-navbar-icon sidebar-toggle primary-toggle" aria-label="Site navigation"> <span class="fa-solid fa-bars"></span> </button> <div class=" navbar-header-items__start"> <div class="navbar-item"> <a class="navbar-brand logo" href="../../index.html"> <img src="../../_static/mne_logo_small.svg" class="logo__image only-light" alt="MNE 1.10.0.dev78+ge65a55965 documentation - Home"/> <img src="../../_static/mne_logo_small.svg" class="logo__image only-dark pst-js-only" alt="MNE 1.10.0.dev78+ge65a55965 documentation - Home"/> </a></div> </div> <div class=" navbar-header-items"> <div class="me-auto navbar-header-items__center"> <div class="navbar-item"> <nav> <ul class="bd-navbar-elements navbar-nav"> <li class="nav-item "> <a class="nav-link nav-internal" 
href="../../install/index.html"> Install </a> </li> <li class="nav-item current active"> <a class="nav-link nav-internal" href="../../documentation/index.html"> Documentation </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../api/python_reference.html"> API Reference </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../help/index.html"> Get help </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../development/index.html"> Development </a> </li> </ul> </nav></div> </div> <div class="navbar-header-items__end"> <div class="navbar-item navbar-persistent--container"> <button class="btn btn-sm pst-navbar-icon search-button search-button__button pst-js-only" title="Search" aria-label="Search" data-bs-placement="bottom" data-bs-toggle="tooltip"> <i class="fa-solid fa-magnifying-glass fa-lg"></i> </button> </div> <div class="navbar-item"> <button class="btn btn-sm nav-link pst-navbar-icon theme-switch-button pst-js-only" aria-label="Color mode" data-bs-title="Color mode" data-bs-placement="bottom" data-bs-toggle="tooltip"> <i class="theme-switch fa-solid fa-sun fa-lg" data-mode="light" title="Light"></i> <i class="theme-switch fa-solid fa-moon fa-lg" data-mode="dark" title="Dark"></i> <i class="theme-switch fa-solid fa-circle-half-stroke fa-lg" data-mode="auto" title="System Settings"></i> </button></div> <div class="navbar-item"> <div class="version-switcher__container dropdown pst-js-only"> <button id="pst-version-switcher-button-2" type="button" class="version-switcher__button btn btn-sm dropdown-toggle" data-bs-toggle="dropdown" aria-haspopup="listbox" aria-controls="pst-version-switcher-list-2" aria-label="Version switcher list" > Choose version <!-- this text may get changed later by javascript --> <span class="caret"></span> </button> <div id="pst-version-switcher-list-2" class="version-switcher__menu dropdown-menu list-group-flush py-0" role="listbox" 
aria-labelledby="pst-version-switcher-button-2"> <!-- dropdown will be populated by javascript on page load --> </div> </div></div> <div class="navbar-item"><ul class="navbar-icon-links" aria-label="External Links"> <li class="nav-item"> <a href="https://discord.gg/rKfvxTuATa" title="Discord" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-discord fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Discord</span></a> </li> <li class="nav-item"> <a href="https://fosstodon.org/@mne" title="Mastodon" class="nav-link pst-navbar-icon" rel="me" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-mastodon fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Mastodon</span></a> </li> <li class="nav-item"> <a href="https://mne.discourse.group/" title="Forum" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-discourse fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Forum</span></a> </li> <li class="nav-item"> <a href="https://github.com/mne-tools/mne-python" title="GitHub" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-square-github fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">GitHub</span></a> </li> </ul></div> </div> </div> <div class="navbar-persistent--mobile"> <button class="btn btn-sm pst-navbar-icon search-button search-button__button pst-js-only" title="Search" aria-label="Search" data-bs-placement="bottom" data-bs-toggle="tooltip"> <i class="fa-solid fa-magnifying-glass fa-lg"></i> </button> </div> <button class="pst-navbar-icon sidebar-toggle secondary-toggle" aria-label="On this page"> <span class="fa-solid fa-outdent"></span> </button> </div> </header> <div class="bd-container"> <div class="bd-container__inner 
bd-page-width"> <dialog id="pst-primary-sidebar-modal"></dialog> <div id="pst-primary-sidebar" class="bd-sidebar-primary bd-sidebar"> <div class="sidebar-header-items sidebar-primary__section"> <div class="sidebar-header-items__center"> <div class="navbar-item"> <nav> <ul class="bd-navbar-elements navbar-nav"> <li class="nav-item "> <a class="nav-link nav-internal" href="../../install/index.html"> Install </a> </li> <li class="nav-item current active"> <a class="nav-link nav-internal" href="../../documentation/index.html"> Documentation </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../api/python_reference.html"> API Reference </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../help/index.html"> Get help </a> </li> <li class="nav-item "> <a class="nav-link nav-internal" href="../../development/index.html"> Development </a> </li> </ul> </nav></div> </div> <div class="sidebar-header-items__end"> <div class="navbar-item"> <button class="btn btn-sm nav-link pst-navbar-icon theme-switch-button pst-js-only" aria-label="Color mode" data-bs-title="Color mode" data-bs-placement="bottom" data-bs-toggle="tooltip"> <i class="theme-switch fa-solid fa-sun fa-lg" data-mode="light" title="Light"></i> <i class="theme-switch fa-solid fa-moon fa-lg" data-mode="dark" title="Dark"></i> <i class="theme-switch fa-solid fa-circle-half-stroke fa-lg" data-mode="auto" title="System Settings"></i> </button></div> <div class="navbar-item"> <div class="version-switcher__container dropdown pst-js-only"> <button id="pst-version-switcher-button-3" type="button" class="version-switcher__button btn btn-sm dropdown-toggle" data-bs-toggle="dropdown" aria-haspopup="listbox" aria-controls="pst-version-switcher-list-3" aria-label="Version switcher list" > Choose version <!-- this text may get changed later by javascript --> <span class="caret"></span> </button> <div id="pst-version-switcher-list-3" class="version-switcher__menu dropdown-menu 
list-group-flush py-0" role="listbox" aria-labelledby="pst-version-switcher-button-3"> <!-- dropdown will be populated by javascript on page load --> </div> </div></div> <div class="navbar-item"><ul class="navbar-icon-links" aria-label="External Links"> <li class="nav-item"> <a href="https://discord.gg/rKfvxTuATa" title="Discord" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-discord fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Discord</span></a> </li> <li class="nav-item"> <a href="https://fosstodon.org/@mne" title="Mastodon" class="nav-link pst-navbar-icon" rel="me" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-mastodon fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Mastodon</span></a> </li> <li class="nav-item"> <a href="https://mne.discourse.group/" title="Forum" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-discourse fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">Forum</span></a> </li> <li class="nav-item"> <a href="https://github.com/mne-tools/mne-python" title="GitHub" class="nav-link pst-navbar-icon" rel="noopener" target="_blank" data-bs-toggle="tooltip" data-bs-placement="bottom"><i class="fa-brands fa-square-github fa-fw fa-lg" aria-hidden="true"></i> <span class="sr-only">GitHub</span></a> </li> </ul></div> </div> </div> <div class="sidebar-primary-items__start sidebar-primary__section"> <div class="sidebar-primary-item"> <nav class="bd-docs-nav bd-links" aria-label="Section Navigation"> <p class="bd-links__title" role="heading" aria-level="1">Section Navigation</p> <div class="bd-toc-item navbar-nav"><ul class="current nav bd-sidenav"> <li class="toctree-l1 has-children"><a class="reference internal" href="../../auto_tutorials/index.html">Tutorials</a><details><summary><span 
class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/intro/index.html">Introductory tutorials</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/10_overview.html">Overview of MEG/EEG analysis with MNE-Python</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/15_inplace.html">Modifying data in-place</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/20_events_from_raw.html">Parsing events from raw data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/30_info.html">The Info data structure</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/40_sensor_locations.html">Working with sensor locations</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/50_configure_mne.html">Configuring MNE-Python</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/intro/70_report.html">Getting started with mne.Report</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/io/index.html">Reading data for different recording systems</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/io/10_reading_meg_data.html">Importing data from MEG devices</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/io/20_reading_eeg_data.html">Importing data from EEG devices</a></li> <li 
class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/io/30_reading_fnirs_data.html">Importing data from fNIRS devices</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/io/60_ctf_bst_auditory.html">Working with CTF data: the Brainstorm auditory dataset</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/io/70_reading_eyetracking_data.html">Importing Data from Eyetracking devices</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/raw/index.html">Working with continuous data</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/raw/10_raw_overview.html">The Raw data structure: continuous data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/raw/20_event_arrays.html">Working with events</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/raw/30_annotate_raw.html">Annotating continuous data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/raw/40_visualize_raw.html">Built-in plotting methods for Raw objects</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/preprocessing/index.html">Preprocessing</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/10_preprocessing_overview.html">Overview of artifact detection</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/15_handling_bad_channels.html">Handling bad channels</a></li> <li 
class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/20_rejecting_bad_data.html">Rejecting bad data spans and breaks</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/25_background_filtering.html">Background information on filtering</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/30_filtering_resampling.html">Filtering and resampling data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/35_artifact_correction_regression.html">Repairing artifacts with regression</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/40_artifact_correction_ica.html">Repairing artifacts with ICA</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/45_projectors_background.html">Background on projectors and projections</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/50_artifact_correction_ssp.html">Repairing artifacts with SSP</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/55_setting_eeg_reference.html">Setting the EEG reference</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/59_head_positions.html">Extracting and visualizing subject head movement</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/60_maxwell_filtering_sss.html">Signal-space separation (SSS) and Maxwell filtering</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/70_fnirs_processing.html">Preprocessing functional near-infrared spectroscopy (fNIRS) data</a></li> <li class="toctree-l3"><a class="reference internal" 
href="../../auto_tutorials/preprocessing/80_opm_processing.html">Preprocessing optically pumped magnetometer (OPM) MEG data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/preprocessing/90_eyetracking_data.html">Working with eye tracker data in MNE-Python</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/epochs/index.html">Segmenting continuous data into epochs</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/10_epochs_overview.html">The Epochs data structure: discontinuous data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/15_baseline_regression.html">Regression-based baseline correction</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/20_visualize_epochs.html">Visualizing epoched data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/30_epochs_metadata.html">Working with Epoch metadata</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/40_autogenerate_metadata.html">Auto-generating Epochs metadata</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/50_epochs_to_data_frame.html">Exporting Epochs to Pandas DataFrames</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/epochs/60_make_fixed_length_epochs.html">Divide continuous data into equally-spaced epochs</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/evoked/index.html">Estimating evoked responses</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid 
fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/evoked/10_evoked_overview.html">The Evoked data structure: evoked/averaged data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/evoked/20_visualize_evoked.html">Visualizing Evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/evoked/30_eeg_erp.html">EEG analysis - Event-Related Potentials (ERPs)</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/evoked/40_whitened.html">Plotting whitened data</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/time-freq/index.html">Time-frequency analysis</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/time-freq/10_spectrum_class.html">The Spectrum and EpochsSpectrum classes: frequency-domain data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/time-freq/20_sensors_time_frequency.html">Frequency and time-frequency sensor analysis</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/time-freq/50_ssvep.html">Frequency-tagging: Basic analysis of an SSVEP/vSSR dataset</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/forward/index.html">Forward models and source spaces</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/10_background_freesurfer.html">FreeSurfer MRI reconstruction</a></li> <li class="toctree-l3"><a class="reference internal" 
href="../../auto_tutorials/forward/20_source_alignment.html">Source alignment and coordinate frames</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/25_automated_coreg.html">Using an automated approach to coregistration</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/30_forward.html">Head model and forward computation</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/35_eeg_no_mri.html">EEG forward operator with a template MRI</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/50_background_freesurfer_mne.html">How MNE uses FreeSurfer’s outputs</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/80_fix_bem_in_blender.html">Fixing BEM and head surfaces</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/forward/90_compute_covariance.html">Computing a covariance matrix</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/inverse/index.html">Source localization and inverses</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/10_stc_class.html">The SourceEstimate data structure</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/20_dipole_fit.html">Source localization with equivalent current dipole (ECD) fit</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/30_mne_dspm_loreta.html">Source localization with MNE, dSPM, sLORETA, and eLORETA</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/35_dipole_orientations.html">The role of 
dipole orientations in distributed source localization</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/40_mne_fixed_free.html">Computing various MNE solutions</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/50_beamformer_lcmv.html">Source reconstruction using an LCMV beamformer</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/60_visualize_stc.html">Visualize source time courses (stcs)</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/70_eeg_mri_coords.html">EEG source localization given electrode locations on an MRI</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/80_brainstorm_phantom_elekta.html">Brainstorm Elekta phantom dataset tutorial</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/85_brainstorm_phantom_ctf.html">Brainstorm CTF phantom dataset tutorial</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/90_phantom_4DBTi.html">4D Neuroimaging/BTi phantom dataset tutorial</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/inverse/95_phantom_KIT.html">KIT phantom dataset tutorial</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/index.html">Statistical analysis of sensor data</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/10_background_stats.html">Statistical inference</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/20_erp_stats.html">Visualising statistical 
significance thresholds on EEG data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/40_cluster_1samp_time_freq.html">Non-parametric 1 sample cluster statistic on single trial power</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/50_cluster_between_time_freq.html">Non-parametric between conditions cluster statistic on single trial power</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/70_cluster_rmANOVA_time_freq.html">Mass-univariate twoway repeated measures ANOVA on single trial power</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-sensor-space/75_cluster_ftest_spatiotemporal.html">Spatiotemporal permutation F-test on full sensor data</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/stats-source-space/index.html">Statistical analysis of source estimates</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-source-space/20_cluster_1samp_spatiotemporal.html">Permutation t-test on source data with spatio-temporal clustering</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-source-space/30_cluster_ftest_spatiotemporal.html">2 samples permutation test on source data with spatio-temporal clustering</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/stats-source-space/60_cluster_rmANOVA_spatiotemporal.html">Repeated measures ANOVA on source data with spatio-temporal clustering</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" 
href="../../auto_tutorials/machine-learning/index.html">Machine learning models of neural activity</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/machine-learning/30_strf.html">Spectro-temporal receptive field (STRF) estimation on continuous data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/machine-learning/50_decoding.html">Decoding (MVPA)</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/clinical/index.html">Clinical applications</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/clinical/20_seeg.html">Working with sEEG data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/clinical/30_ecog.html">Working with ECoG data</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/clinical/60_sleep.html">Sleep stage classification from polysomnography (PSG) data</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/simulation/index.html">Simulation</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/simulation/10_array_objs.html">Creating MNE-Python data structures from scratch</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/simulation/70_point_spread.html">Corrupt known signal with point spread</a></li> <li class="toctree-l3"><a class="reference internal" 
href="../../auto_tutorials/simulation/80_dics.html">DICS for power mapping</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../../auto_tutorials/visualization/index.html">Visualization tutorials</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/visualization/10_publication_figure.html">Make figures more publication ready</a></li> <li class="toctree-l3"><a class="reference internal" href="../../auto_tutorials/visualization/20_ui_events.html">Using the event system to link figures</a></li> </ul> </details></li> </ul> </details></li> <li class="toctree-l1 current active has-children"><a class="reference internal" href="../index.html">Examples</a><details open="open"><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul class="current"> <li class="toctree-l2 has-children"><a class="reference internal" href="../io/index.html">Input/Output</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../io/elekta_epochs.html">Getting averaging info from .fif files</a></li> <li class="toctree-l3"><a class="reference internal" href="../io/read_impedances.html">Getting impedances from raw files</a></li> <li class="toctree-l3"><a class="reference internal" href="../io/read_neo_format.html">How to use data in neural ensemble (NEO) format</a></li> <li class="toctree-l3"><a class="reference internal" href="../io/read_noise_covariance_matrix.html">Reading/Writing a noise covariance matrix</a></li> <li class="toctree-l3"><a class="reference internal" href="../io/read_xdf.html">Reading XDF EEG data</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a 
class="reference internal" href="../simulation/index.html">Data Simulation</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../simulation/plot_stc_metrics.html">Compare simulated and estimated source activity</a></li> <li class="toctree-l3"><a class="reference internal" href="../simulation/simulate_evoked_data.html">Generate simulated evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../simulation/simulate_raw_data.html">Generate simulated raw data</a></li> <li class="toctree-l3"><a class="reference internal" href="../simulation/simulated_raw_data_using_subject_anatomy.html">Simulate raw data using subject anatomy</a></li> <li class="toctree-l3"><a class="reference internal" href="../simulation/source_simulator.html">Generate simulated source data</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../preprocessing/index.html">Preprocessing</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/contralateral_referencing.html">Using contralateral referencing for EEG</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/css.html">Cortical Signal Suppression (CSS) for removal of cortical signals</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/define_target_events.html">Define target events based on time lag, plot evoked response</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/eeg_bridging.html">Identify EEG Electrodes Bridged by too much Gel</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/eeg_csd.html">Transform EEG data using current source density 
(CSD)</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/eog_artifact_histogram.html">Show EOG artifact timing</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/eog_regression.html">Reduce EOG artifacts through regression</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/epochs_metadata.html">Automated epochs metadata generation with variable time windows</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/esg_rm_heart_artefact_pcaobs.html">Principal Component Analysis - Optimal Basis Sets (PCA-OBS) removing cardiac artefact</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/find_ref_artifacts.html">Find MEG reference channel artifacts</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/fnirs_artifact_removal.html">Visualise NIRS artifact correction methods</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/ica_comparison.html">Compare the different ICA algorithms in MNE</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/interpolate_bad_channels.html">Interpolate bad channels for MEG/EEG channels</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/interpolate_to.html">Interpolate EEG data to any montage</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/movement_compensation.html">Maxwell filter data with movement compensation</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/movement_detection.html">Annotate movement artifacts and reestimate dev_head_t</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/muscle_detection.html">Annotate muscle artifacts</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/muscle_ica.html">Removing 
muscle ICA components</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/otp.html">Plot sensor denoising using oversampled temporal projection</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/shift_evoked.html">Shifting time-scale in evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/virtual_evoked.html">Remap MEG channel types</a></li> <li class="toctree-l3"><a class="reference internal" href="../preprocessing/xdawn_denoising.html">XDAWN Denoising</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../visualization/index.html">Visualization</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../visualization/3d_to_2d.html">How to convert 3D electrode positions to a 2D image</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/brain.html">Plotting with <code class="docutils literal notranslate"><span class="pre">mne.viz.Brain</span></code></a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/channel_epochs_image.html">Visualize channel over epochs as an image</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/eeg_on_scalp.html">Plotting EEG sensors on the scalp</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/evoked_arrowmap.html">Plotting topographic arrowmaps of evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/evoked_topomap.html">Plotting topographic maps of evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/evoked_whitening.html">Whitening evoked data with a noise covariance</a></li> <li class="toctree-l3"><a class="reference internal" 
href="../visualization/eyetracking_plot_heatmap.html">Plotting eye-tracking heatmaps in MNE-Python</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/meg_sensors.html">Plotting sensor layouts of MEG systems</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/mne_helmet.html">Plot the MNE brain and helmet</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/montage_sgskip.html">Plotting sensor layouts of EEG systems</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/parcellation.html">Plot a cortical parcellation</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/roi_erpimage_by_rt.html">Plot single trial activity, grouped by ROI and sorted by RT</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/ssp_projs_sensitivity_map.html">Sensitivity map of SSP projections</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/topo_compare_conditions.html">Compare evoked responses for different conditions</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/topo_customized.html">Plot custom topographies for MEG sensors</a></li> <li class="toctree-l3"><a class="reference internal" href="../visualization/xhemi.html">Cross-hemisphere comparison</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../time_frequency/index.html">Time-Frequency Examples</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/compute_csd.html">Compute a cross-spectral density (CSD) matrix</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/compute_source_psd_epochs.html">Compute Power Spectral Density of 
inverse solution from single epochs</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/source_label_time_frequency.html">Compute power and phase lock in label of the source space</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/source_power_spectrum.html">Compute source power spectral density (PSD) in a label</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/source_power_spectrum_opm.html">Compute source power spectral density (PSD) of VectorView and OPM data</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/source_space_time_frequency.html">Compute induced power in the source space with dSPM</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/temporal_whitening.html">Temporal whitening with AR model</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/time_frequency_erds.html">Compute and visualize ERDS maps</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/time_frequency_global_field_power.html">Explore event-related dynamics for specific frequency bands</a></li> <li class="toctree-l3"><a class="reference internal" href="../time_frequency/time_frequency_simulated.html">Time-frequency on simulated data (Multitaper vs. Morlet vs. Stockwell vs. 
Hilbert)</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../stats/index.html">Statistics Examples</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../stats/cluster_stats_evoked.html">Permutation F-test on sensor data with 1D cluster level</a></li> <li class="toctree-l3"><a class="reference internal" href="../stats/fdr_stats_evoked.html">FDR correction on T-test on sensor data</a></li> <li class="toctree-l3"><a class="reference internal" href="../stats/linear_regression_raw.html">Regression on continuous data (rER[P/F])</a></li> <li class="toctree-l3"><a class="reference internal" href="../stats/sensor_permutation_test.html">Permutation T-test on sensor data</a></li> <li class="toctree-l3"><a class="reference internal" href="../stats/sensor_regression.html">Analysing continuous features with binning and regression in sensor space</a></li> </ul> </details></li> <li class="toctree-l2 current active has-children"><a class="reference internal" href="index.html">Machine Learning (Decoding, Encoding, and MVPA)</a><details open="open"><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul class="current"> <li class="toctree-l3"><a class="reference internal" href="decoding_csp_eeg.html">Motor imagery decoding from EEG data using the Common Spatial Pattern (CSP)</a></li> <li class="toctree-l3"><a class="reference internal" href="decoding_csp_timefreq.html">Decoding in time-frequency space using Common Spatial Patterns (CSP)</a></li> <li class="toctree-l3"><a class="reference internal" href="decoding_rsa_sgskip.html">Representational Similarity Analysis</a></li> <li class="toctree-l3"><a class="reference internal" href="decoding_spatio_temporal_source.html">Decoding source space data</a></li> <li class="toctree-l3"><a 
class="reference internal" href="decoding_spoc_CMC.html">Continuous Target Decoding with SPoC</a></li> <li class="toctree-l3 current active"><a class="current reference internal" href="#">Decoding sensor space data with generalization across time and conditions</a></li> <li class="toctree-l3"><a class="reference internal" href="decoding_unsupervised_spatial_filter.html">Analysis of evoked response using ICA and PCA reduction techniques</a></li> <li class="toctree-l3"><a class="reference internal" href="decoding_xdawn_eeg.html">XDAWN Decoding From EEG data</a></li> <li class="toctree-l3"><a class="reference internal" href="ems_filtering.html">Compute effect-matched-spatial filtering (EMS)</a></li> <li class="toctree-l3"><a class="reference internal" href="linear_model_patterns.html">Linear classifier on sensor data with plot patterns and filters</a></li> <li class="toctree-l3"><a class="reference internal" href="receptive_field_mtrf.html">Receptive Field Estimation and Prediction</a></li> <li class="toctree-l3"><a class="reference internal" href="ssd_spatial_filters.html">Compute spatial filters with Spatio-Spectral Decomposition (SSD)</a></li> </ul> </details></li> <li class="toctree-l2"><a class="reference internal" href="../connectivity/index.html">Connectivity Analysis Examples</a></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../forward/index.html">Forward modeling</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../forward/forward_sensitivity_maps.html">Display sensitivity maps for EEG and MEG sensors</a></li> <li class="toctree-l3"><a class="reference internal" href="../forward/left_cerebellum_volume_source.html">Generate a left cerebellum volume source space</a></li> <li class="toctree-l3"><a class="reference internal" href="../forward/source_space_morphing.html">Use source space 
morphing</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../inverse/index.html">Inverse problem and source analysis</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../inverse/compute_mne_inverse_epochs_in_label.html">Compute MNE-dSPM inverse solution on single epochs</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/compute_mne_inverse_raw_in_label.html">Compute sLORETA inverse solution on raw data</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/compute_mne_inverse_volume.html">Compute MNE-dSPM inverse solution on evoked data in volume source space</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/custom_inverse_solver.html">Source localization with a custom inverse solver</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/dics_epochs.html">Compute source level time-frequency timecourses using a DICS beamformer</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/dics_source_power.html">Compute source power using DICS beamformer</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/evoked_ers_source_power.html">Compute evoked ERS source power using DICS, LCMV beamformer, and dSPM</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/gamma_map_inverse.html">Compute a sparse inverse solution using the Gamma-MAP empirical Bayesian method</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/label_activation_from_stc.html">Extracting time course from source_estimate object</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/label_from_stc.html">Generate a functional label from source estimates</a></li> <li class="toctree-l3"><a 
class="reference internal" href="../inverse/label_source_activations.html">Extracting the time series of activations in a label</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/mixed_norm_inverse.html">Compute sparse inverse solution with mixed norm: MxNE and irMxNE</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/mixed_source_space_inverse.html">Compute MNE inverse solution on evoked data with a mixed source space</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/mne_cov_power.html">Compute source power estimate by projecting the covariance with MNE</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/morph_surface_stc.html">Morph surface source estimate</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/morph_volume_stc.html">Morph volumetric source estimate</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/multi_dipole_model.html">Computing source timecourses with an XFit-like multi-dipole model</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/multidict_reweighted_tfmxne.html">Compute iterative reweighted TF-MxNE with multiscale time-frequency dictionary</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/psf_ctf_label_leakage.html">Visualize source leakage among labels using a circular graph</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/psf_ctf_vertices.html">Plot point-spread functions (PSFs) and cross-talk functions (CTFs)</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/psf_ctf_vertices_lcmv.html">Compute cross-talk functions for LCMV beamformers</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/psf_volume.html">Plot point-spread functions (PSFs) for a volume</a></li> <li class="toctree-l3"><a class="reference internal" 
href="../inverse/rap_music.html">Compute Rap-Music on evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/read_inverse.html">Reading an inverse operator</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/read_stc.html">Reading an STC file</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/resolution_metrics.html">Compute spatial resolution metrics in source space</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/resolution_metrics_eegmeg.html">Compute spatial resolution metrics to compare MEG with EEG+MEG</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/snr_estimate.html">Estimate data SNR using an inverse</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/source_space_snr.html">Computing source space SNR</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/time_frequency_mixed_norm_inverse.html">Compute MxNE with time-frequency sparse prior</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/trap_music.html">Compute Trap-Music on evoked data</a></li> <li class="toctree-l3"><a class="reference internal" href="../inverse/vector_mne_solution.html">Plotting the full vector-valued MNE solution</a></li> </ul> </details></li> <li class="toctree-l2 has-children"><a class="reference internal" href="../datasets/index.html">Examples on open datasets</a><details><summary><span class="toctree-toggle" role="presentation"><i class="fa-solid fa-chevron-down"></i></span></summary><ul> <li class="toctree-l3"><a class="reference internal" href="../datasets/brainstorm_data.html">Brainstorm raw (median nerve) dataset</a></li> <li class="toctree-l3"><a class="reference internal" href="../datasets/hf_sef_data.html">HF-SEF dataset</a></li> <li class="toctree-l3"><a class="reference internal" href="../datasets/kernel_phantom.html">Kernel OPM phantom 
data</a></li> <li class="toctree-l3"><a class="reference internal" href="../datasets/limo_data.html">Single trial linear regression analysis with the LIMO dataset</a></li> <li class="toctree-l3"><a class="reference internal" href="../datasets/opm_data.html">Optically pumped magnetometer (OPM) data</a></li> <li class="toctree-l3"><a class="reference internal" href="../datasets/spm_faces_dataset.html">From raw data to dSPM on SPM Faces dataset</a></li> </ul> </details></li> </ul> </details></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/glossary.html">Glossary</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/implementation.html">Implementation details</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/design_philosophy.html">Design philosophy</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/datasets.html">Example datasets</a></li> <li class="toctree-l1"><a class="reference internal" href="../../generated/commands.html">Command-line tools</a></li> <li class="toctree-l1"><a class="reference internal" href="../../help/migrating.html">Migrating from other analysis software</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/cookbook.html">The typical M/EEG workflow</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/cite.html">How to cite MNE-Python</a></li> <li class="toctree-l1"><a class="reference internal" href="../../documentation/cited.html">Papers citing MNE-Python</a></li> </ul> </div> </nav></div> </div> <div class="sidebar-primary-items__end sidebar-primary__section"> <div class="sidebar-primary-item"> <div id="ethical-ad-placement" class="flat" data-ea-publisher="readthedocs" data-ea-type="readthedocs-sidebar" data-ea-manual="true"> </div></div> </div> </div> <main id="main-content" class="bd-main" role="main"> <div class="bd-content"> <div 
class="bd-article-container"> <div class="bd-header-article d-print-none"></div> <div> <div class="sphx-glr-download-link-note admonition note"> <p class="admonition-title">Note</p> <p><a class="reference internal" href="#sphx-glr-download-auto-examples-decoding-decoding-time-generalization-conditions-py"><span class="std std-ref">Go to the end</span></a> to download the full example code.</p> </div> <section class="sphx-glr-example-title" id="decoding-sensor-space-data-with-generalization-across-time-and-conditions"> <span id="ex-linear-sensor-decoding"></span><span id="sphx-glr-auto-examples-decoding-decoding-time-generalization-conditions-py"></span><h1>Decoding sensor space data with generalization across time and conditions<a class="headerlink" href="#decoding-sensor-space-data-with-generalization-across-time-and-conditions" title="Link to this heading">#</a></h1> <p>This example runs the analysis described in <a class="footnote-reference brackets" href="#footcite-kingdehaene2014" id="id3" role="doc-noteref"><span class="fn-bracket">[</span>1<span class="fn-bracket">]</span></a>. 
It illustrates how one can fit a linear classifier to identify a discriminatory topography at a given time instant and subsequently assess whether this linear model can accurately predict all of the time samples of a second set of conditions.</p> <div class="highlight-Python notranslate"><div class="highlight"><pre><span></span><span class="c1"># Authors: Jean-Rémi King <jeanremi.king@gmail.com></span> <span class="c1"># Alexandre Gramfort <alexandre.gramfort@inria.fr></span> <span class="c1"># Denis Engemann <denis.engemann@gmail.com></span> <span class="c1">#</span> <span class="c1"># License: BSD-3-Clause</span> <span class="c1"># Copyright the MNE-Python contributors.</span> </pre></div> </div> <div class="highlight-Python notranslate"><div class="highlight"><pre><span></span><span class="kn">import</span><span class="w"> </span><span class="nn">matplotlib.pyplot</span><span class="w"> </span><span class="k">as</span><span class="w"> </span><span class="nn">plt</span> <span class="kn">from</span><span class="w"> </span><span class="nn">sklearn.linear_model</span><span class="w"> </span><span class="kn">import</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html#sklearn.linear_model.LogisticRegression" title="sklearn.linear_model.LogisticRegression" class="sphx-glr-backref-module-sklearn-linear_model sphx-glr-backref-type-py-class"><span class="n">LogisticRegression</span></a> <span class="kn">from</span><span class="w"> </span><span class="nn">sklearn.pipeline</span><span class="w"> </span><span class="kn">import</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.pipeline.make_pipeline.html#sklearn.pipeline.make_pipeline" title="sklearn.pipeline.make_pipeline" class="sphx-glr-backref-module-sklearn-pipeline sphx-glr-backref-type-py-function"><span class="n">make_pipeline</span></a> <span class="kn">from</span><span class="w"> </span><span 
class="nn">sklearn.preprocessing</span><span class="w"> </span><span class="kn">import</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler" title="sklearn.preprocessing.StandardScaler" class="sphx-glr-backref-module-sklearn-preprocessing sphx-glr-backref-type-py-class"><span class="n">StandardScaler</span></a> <span class="kn">import</span><span class="w"> </span><span class="nn">mne</span> <span class="kn">from</span><span class="w"> </span><span class="nn">mne.datasets</span><span class="w"> </span><span class="kn">import</span> <span class="n">sample</span> <span class="kn">from</span><span class="w"> </span><span class="nn">mne.decoding</span><span class="w"> </span><span class="kn">import</span> <a href="../../generated/mne.decoding.GeneralizingEstimator.html#mne.decoding.GeneralizingEstimator" title="mne.decoding.GeneralizingEstimator" class="sphx-glr-backref-module-mne-decoding sphx-glr-backref-type-py-class"><span class="n">GeneralizingEstimator</span></a> <span class="nb">print</span><span class="p">(</span><span class="vm">__doc__</span><span class="p">)</span> <span class="c1"># Preprocess data</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">data_path</span></a> <span class="o">=</span> <a href="../../generated/mne.datasets.sample.data_path.html#mne.datasets.sample.data_path" title="mne.datasets.sample.data_path" class="sphx-glr-backref-module-mne-datasets-sample sphx-glr-backref-type-py-function"><span class="n">sample</span><span class="o">.</span><span class="n">data_path</span></a><span class="p">()</span> <span class="c1"># Load and filter data, set up epochs</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" 
class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">meg_path</span></a> <span class="o">=</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">data_path</span></a> <span class="o">/</span> <span class="s2">"MEG"</span> <span class="o">/</span> <span class="s2">"sample"</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">raw_fname</span></a> <span class="o">=</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">meg_path</span></a> <span class="o">/</span> <span class="s2">"sample_audvis_filt-0-40_raw.fif"</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events_fname</span></a> <span class="o">=</span> <a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">meg_path</span></a> <span class="o">/</span> <span class="s2">"sample_audvis_filt-0-40_raw-eve.fif"</span> <a href="../../generated/mne.io.Raw.html#mne.io.Raw" title="mne.io.Raw" class="sphx-glr-backref-module-mne-io sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">raw</span></a> <span class="o">=</span> <a href="../../generated/mne.io.read_raw_fif.html#mne.io.read_raw_fif" title="mne.io.read_raw_fif" 
class="sphx-glr-backref-module-mne-io sphx-glr-backref-type-py-function"><span class="n">mne</span><span class="o">.</span><span class="n">io</span><span class="o">.</span><span class="n">read_raw_fif</span></a><span class="p">(</span><a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">raw_fname</span></a><span class="p">,</span> <span class="n">preload</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span> <a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">picks</span></a> <span class="o">=</span> <a href="../../generated/mne.pick_types.html#mne.pick_types" title="mne.pick_types" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-function"><span class="n">mne</span><span class="o">.</span><span class="n">pick_types</span></a><span class="p">(</span><a href="../../generated/mne.Info.html#mne.Info" title="mne.Info" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">raw</span><span class="o">.</span><span class="n">info</span></a><span class="p">,</span> <span class="n">meg</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">exclude</span><span class="o">=</span><span class="s2">"bads"</span><span class="p">)</span> <span class="c1"># Pick MEG channels</span> <a href="../../generated/mne.io.Raw.html#mne.io.Raw.filter" title="mne.io.Raw.filter" class="sphx-glr-backref-module-mne-io sphx-glr-backref-type-py-method"><span class="n">raw</span><span class="o">.</span><span class="n">filter</span></a><span class="p">(</span><span class="mf">1.0</span><span class="p">,</span> <span 
class="mf">30.0</span><span class="p">,</span> <span class="n">fir_design</span><span class="o">=</span><span class="s2">"firwin"</span><span class="p">)</span> <span class="c1"># Band pass filtering signals</span> <a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events</span></a> <span class="o">=</span> <a href="../../generated/mne.read_events.html#mne.read_events" title="mne.read_events" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-function"><span class="n">mne</span><span class="o">.</span><span class="n">read_events</span></a><span class="p">(</span><a href="https://docs.python.org/3/library/pathlib.html#pathlib.PosixPath" title="pathlib.PosixPath" class="sphx-glr-backref-module-pathlib sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events_fname</span></a><span class="p">)</span> <a href="https://docs.python.org/3/library/stdtypes.html#dict" title="builtins.dict" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">event_id</span></a> <span class="o">=</span> <span class="p">{</span> <span class="s2">"Auditory/Left"</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span> <span class="s2">"Auditory/Right"</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span> <span class="s2">"Visual/Left"</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span> <span class="s2">"Visual/Right"</span><span class="p">:</span> <span class="mi">4</span><span class="p">,</span> <span class="p">}</span> <a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmin</span></a> <span 
class="o">=</span> <span class="o">-</span><span class="mf">0.050</span> <a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmax</span></a> <span class="o">=</span> <span class="mf">0.400</span> <span class="c1"># decimate to make the example faster to run, but then use verbose='error' in</span> <span class="c1"># the Epochs constructor to suppress warning about decimation causing aliasing</span> <a href="https://docs.python.org/3/library/functions.html#int" title="builtins.int" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">decim</span></a> <span class="o">=</span> <span class="mi">2</span> <a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">epochs</span></a> <span class="o">=</span> <a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class"><span class="n">mne</span><span class="o">.</span><span class="n">Epochs</span></a><span class="p">(</span> <a href="../../generated/mne.io.Raw.html#mne.io.Raw" title="mne.io.Raw" class="sphx-glr-backref-module-mne-io sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">raw</span></a><span class="p">,</span> <a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events</span></a><span class="p">,</span> <a href="https://docs.python.org/3/library/stdtypes.html#dict" title="builtins.dict" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span 
class="n">event_id</span></a><span class="o">=</span><a href="https://docs.python.org/3/library/stdtypes.html#dict" title="builtins.dict" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">event_id</span></a><span class="p">,</span> <a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmin</span></a><span class="o">=</span><a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmin</span></a><span class="p">,</span> <a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmax</span></a><span class="o">=</span><a href="https://docs.python.org/3/library/functions.html#float" title="builtins.float" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">tmax</span></a><span class="p">,</span> <span class="n">proj</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">picks</span></a><span class="o">=</span><a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">picks</span></a><span class="p">,</span> <span class="n">baseline</span><span class="o">=</span><span class="kc">None</span><span 
class="p">,</span> <span class="n">preload</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">reject</span><span class="o">=</span><span class="nb">dict</span><span class="p">(</span><span class="n">mag</span><span class="o">=</span><span class="mf">5e-12</span><span class="p">),</span> <a href="https://docs.python.org/3/library/functions.html#int" title="builtins.int" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">decim</span></a><span class="o">=</span><a href="https://docs.python.org/3/library/functions.html#int" title="builtins.int" class="sphx-glr-backref-module-builtins sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">decim</span></a><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="s2">"error"</span><span class="p">,</span> <span class="p">)</span> </pre></div> </div> <div class="sphx-glr-script-out highlight-none notranslate"><div class="highlight"><pre><span></span>Opening raw data file /home/circleci/mne_data/MNE-sample-data/MEG/sample/sample_audvis_filt-0-40_raw.fif... Read a total of 4 projection items: PCA-v1 (1 x 102) idle PCA-v2 (1 x 102) idle PCA-v3 (1 x 102) idle Average EEG reference (1 x 60) idle Range : 6450 ... 48149 = 42.956 ... 320.665 secs Ready. Reading 0 ... 41699 = 0.000 ... 277.709 secs... 
Filtering raw data in 1 contiguous segment Setting up band-pass filter from 1 - 30 Hz FIR filter parameters --------------------- Designing a one-pass, zero-phase, non-causal bandpass filter: - Windowed time-domain design (firwin) method - Hamming window with 0.0194 passband ripple and 53 dB stopband attenuation - Lower passband edge: 1.00 - Lower transition bandwidth: 1.00 Hz (-6 dB cutoff frequency: 0.50 Hz) - Upper passband edge: 30.00 Hz - Upper transition bandwidth: 7.50 Hz (-6 dB cutoff frequency: 33.75 Hz) - Filter length: 497 samples (3.310 s) </pre></div> </div> <p>We will train the classifier on all left visual vs auditory trials and test on all right visual vs auditory trials.</p> <div class="highlight-Python notranslate"><div class="highlight"><pre><span></span><a href="https://scikit-learn.org/stable/modules/generated/sklearn.pipeline.Pipeline.html#sklearn.pipeline.Pipeline" title="sklearn.pipeline.Pipeline" class="sphx-glr-backref-module-sklearn-pipeline sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">clf</span></a> <span class="o">=</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.pipeline.make_pipeline.html#sklearn.pipeline.make_pipeline" title="sklearn.pipeline.make_pipeline" class="sphx-glr-backref-module-sklearn-pipeline sphx-glr-backref-type-py-function"><span class="n">make_pipeline</span></a><span class="p">(</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler" title="sklearn.preprocessing.StandardScaler" class="sphx-glr-backref-module-sklearn-preprocessing sphx-glr-backref-type-py-class"><span class="n">StandardScaler</span></a><span class="p">(),</span> <a href="https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html#sklearn.linear_model.LogisticRegression" title="sklearn.linear_model.LogisticRegression" class="sphx-glr-backref-module-sklearn-linear_model 
sphx-glr-backref-type-py-class"><span class="n">LogisticRegression</span></a><span class="p">(</span><span class="n">solver</span><span class="o">=</span><span class="s2">"liblinear"</span><span class="p">),</span> <span class="c1"># liblinear is faster than lbfgs</span> <span class="p">)</span> <a href="../../generated/mne.decoding.GeneralizingEstimator.html#mne.decoding.GeneralizingEstimator" title="mne.decoding.GeneralizingEstimator" class="sphx-glr-backref-module-mne-decoding sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">time_gen</span></a> <span class="o">=</span> <a href="../../generated/mne.decoding.GeneralizingEstimator.html#mne.decoding.GeneralizingEstimator" title="mne.decoding.GeneralizingEstimator" class="sphx-glr-backref-module-mne-decoding sphx-glr-backref-type-py-class"><span class="n">GeneralizingEstimator</span></a><span class="p">(</span><a href="https://scikit-learn.org/stable/modules/generated/sklearn.pipeline.Pipeline.html#sklearn.pipeline.Pipeline" title="sklearn.pipeline.Pipeline" class="sphx-glr-backref-module-sklearn-pipeline sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">clf</span></a><span class="p">,</span> <span class="n">scoring</span><span class="o">=</span><span class="s2">"roc_auc"</span><span class="p">,</span> <span class="n">n_jobs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span> <span class="c1"># Fit classifiers on the epochs where the stimulus was presented to the left.</span> <span class="c1"># Note that the experimental condition y indicates auditory or visual</span> <a href="../../generated/mne.decoding.GeneralizingEstimator.html#mne.decoding.GeneralizingEstimator.fit" title="mne.decoding.GeneralizingEstimator.fit" class="sphx-glr-backref-module-mne-decoding sphx-glr-backref-type-py-method"><span class="n">time_gen</span><span 
class="o">.</span><span class="n">fit</span></a><span class="p">(</span><span class="n">X</span><span class="o">=</span><a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">epochs</span></a><span class="p">[</span><span class="s2">"Left"</span><span class="p">]</span><span class="o">.</span><span class="n">get_data</span><span class="p">(</span><span class="n">copy</span><span class="o">=</span><span class="kc">False</span><span class="p">),</span> <span class="n">y</span><span class="o">=</span><a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">epochs</span></a><span class="p">[</span><span class="s2">"Left"</span><span class="p">]</span><span class="o">.</span><a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events</span></a><span class="p">[:,</span> <span class="mi">2</span><span class="p">]</span> <span class="o">></span> <span class="mi">2</span><span class="p">)</span> </pre></div> </div> <div class="sphx-glr-script-out highlight-none notranslate"><div class="highlight"><pre><span></span> 0%| | Fitting GeneralizingEstimator : 0/35 [00:00<?, ?it/s] 6%|▌ | Fitting GeneralizingEstimator : 2/35 [00:00<00:00, 55.20it/s] 11%|█▏ | Fitting GeneralizingEstimator : 4/35 [00:00<00:00, 57.31it/s] 17%|█▋ | Fitting GeneralizingEstimator : 6/35 [00:00<00:00, 58.04it/s] 23%|██▎ | Fitting GeneralizingEstimator : 8/35 [00:00<00:00, 58.39it/s] 31%|███▏ | Fitting GeneralizingEstimator : 11/35 [00:00<00:00, 65.10it/s] 40%|████ | Fitting GeneralizingEstimator : 14/35 [00:00<00:00, 69.60it/s] 49%|████▊ | Fitting GeneralizingEstimator : 17/35 [00:00<00:00, 
72.78it/s] 54%|█████▍ | Fitting GeneralizingEstimator : 19/35 [00:00<00:00, 70.81it/s] 63%|██████▎ | Fitting GeneralizingEstimator : 22/35 [00:00<00:00, 73.27it/s] 69%|██████▊ | Fitting GeneralizingEstimator : 24/35 [00:00<00:00, 70.83it/s] 74%|███████▍ | Fitting GeneralizingEstimator : 26/35 [00:00<00:00, 69.53it/s] 83%|████████▎ | Fitting GeneralizingEstimator : 29/35 [00:00<00:00, 71.59it/s] 89%|████████▊ | Fitting GeneralizingEstimator : 31/35 [00:00<00:00, 70.36it/s] 97%|█████████▋| Fitting GeneralizingEstimator : 34/35 [00:00<00:00, 72.07it/s] 100%|██████████| Fitting GeneralizingEstimator : 35/35 [00:00<00:00, 71.61it/s] 100%|██████████| Fitting GeneralizingEstimator : 35/35 [00:00<00:00, 70.90it/s] </pre></div> </div> <div class="output_subarea output_html rendered_html output_result"> <style>#sk-container-id-1 { /* Definition of color scheme common for light and dark mode */ --sklearn-color-text: #000; --sklearn-color-text-muted: #666; --sklearn-color-line: gray; /* Definition of color scheme for unfitted estimators */ --sklearn-color-unfitted-level-0: #fff5e6; --sklearn-color-unfitted-level-1: #f6e4d2; --sklearn-color-unfitted-level-2: #ffe0b3; --sklearn-color-unfitted-level-3: chocolate; /* Definition of color scheme for fitted estimators */ --sklearn-color-fitted-level-0: #f0f8ff; --sklearn-color-fitted-level-1: #d4ebff; --sklearn-color-fitted-level-2: #b3dbfd; --sklearn-color-fitted-level-3: cornflowerblue; /* Specific color for light theme */ --sklearn-color-text-on-default-background: var(--sg-text-color, var(--theme-code-foreground, var(--jp-content-font-color1, black))); --sklearn-color-background: var(--sg-background-color, var(--theme-background, var(--jp-layout-color0, white))); --sklearn-color-border-box: var(--sg-text-color, var(--theme-code-foreground, var(--jp-content-font-color1, black))); --sklearn-color-icon: #696969; @media (prefers-color-scheme: dark) { /* Redefinition of color scheme for dark theme */ 
--sklearn-color-text-on-default-background: var(--sg-text-color, var(--theme-code-foreground, var(--jp-content-font-color1, white))); --sklearn-color-background: var(--sg-background-color, var(--theme-background, var(--jp-layout-color0, #111))); --sklearn-color-border-box: var(--sg-text-color, var(--theme-code-foreground, var(--jp-content-font-color1, white))); --sklearn-color-icon: #878787; } } #sk-container-id-1 { color: var(--sklearn-color-text); } #sk-container-id-1 pre { padding: 0; } #sk-container-id-1 input.sk-hidden--visually { border: 0; clip: rect(1px 1px 1px 1px); clip: rect(1px, 1px, 1px, 1px); height: 1px; margin: -1px; overflow: hidden; padding: 0; position: absolute; width: 1px; } #sk-container-id-1 div.sk-dashed-wrapped { border: 1px dashed var(--sklearn-color-line); margin: 0 0.4em 0.5em 0.4em; box-sizing: border-box; padding-bottom: 0.4em; background-color: var(--sklearn-color-background); } #sk-container-id-1 div.sk-container { /* jupyter's `normalize.less` sets `[hidden] { display: none; }` but bootstrap.min.css set `[hidden] { display: none !important; }` so we also need the `!important` here to be able to override the default hidden behavior on the sphinx rendered scikit-learn.org. 
See: https://github.com/scikit-learn/scikit-learn/issues/21755 */ display: inline-block !important; position: relative; } #sk-container-id-1 div.sk-text-repr-fallback { display: none; } div.sk-parallel-item, div.sk-serial, div.sk-item { /* draw centered vertical line to link estimators */ background-image: linear-gradient(var(--sklearn-color-text-on-default-background), var(--sklearn-color-text-on-default-background)); background-size: 2px 100%; background-repeat: no-repeat; background-position: center center; } /* Parallel-specific style estimator block */ #sk-container-id-1 div.sk-parallel-item::after { content: ""; width: 100%; border-bottom: 2px solid var(--sklearn-color-text-on-default-background); flex-grow: 1; } #sk-container-id-1 div.sk-parallel { display: flex; align-items: stretch; justify-content: center; background-color: var(--sklearn-color-background); position: relative; } #sk-container-id-1 div.sk-parallel-item { display: flex; flex-direction: column; } #sk-container-id-1 div.sk-parallel-item:first-child::after { align-self: flex-end; width: 50%; } #sk-container-id-1 div.sk-parallel-item:last-child::after { align-self: flex-start; width: 50%; } #sk-container-id-1 div.sk-parallel-item:only-child::after { width: 0; } /* Serial-specific style estimator block */ #sk-container-id-1 div.sk-serial { display: flex; flex-direction: column; align-items: center; background-color: var(--sklearn-color-background); padding-right: 1em; padding-left: 1em; } /* Toggleable style: style used for estimator/Pipeline/ColumnTransformer box that is clickable and can be expanded/collapsed. - Pipeline and ColumnTransformer use this feature and define the default style - Estimators will overwrite some part of the style using the `sk-estimator` class */ /* Pipeline and ColumnTransformer style (default) */ #sk-container-id-1 div.sk-toggleable { /* Default theme specific background. 
It is overwritten whether we have a specific estimator or a Pipeline/ColumnTransformer */ background-color: var(--sklearn-color-background); } /* Toggleable label */ #sk-container-id-1 label.sk-toggleable__label { cursor: pointer; display: flex; width: 100%; margin-bottom: 0; padding: 0.5em; box-sizing: border-box; text-align: center; align-items: start; justify-content: space-between; gap: 0.5em; } #sk-container-id-1 label.sk-toggleable__label .caption { font-size: 0.6rem; font-weight: lighter; color: var(--sklearn-color-text-muted); } #sk-container-id-1 label.sk-toggleable__label-arrow:before { /* Arrow on the left of the label */ content: "▸"; float: left; margin-right: 0.25em; color: var(--sklearn-color-icon); } #sk-container-id-1 label.sk-toggleable__label-arrow:hover:before { color: var(--sklearn-color-text); } /* Toggleable content - dropdown */ #sk-container-id-1 div.sk-toggleable__content { max-height: 0; max-width: 0; overflow: hidden; text-align: left; /* unfitted */ background-color: var(--sklearn-color-unfitted-level-0); } #sk-container-id-1 div.sk-toggleable__content.fitted { /* fitted */ background-color: var(--sklearn-color-fitted-level-0); } #sk-container-id-1 div.sk-toggleable__content pre { margin: 0.2em; border-radius: 0.25em; color: var(--sklearn-color-text); /* unfitted */ background-color: var(--sklearn-color-unfitted-level-0); } #sk-container-id-1 div.sk-toggleable__content.fitted pre { /* unfitted */ background-color: var(--sklearn-color-fitted-level-0); } #sk-container-id-1 input.sk-toggleable__control:checked~div.sk-toggleable__content { /* Expand drop-down */ max-height: 200px; max-width: 100%; overflow: auto; } #sk-container-id-1 input.sk-toggleable__control:checked~label.sk-toggleable__label-arrow:before { content: "▾"; } /* Pipeline/ColumnTransformer-specific style */ #sk-container-id-1 div.sk-label input.sk-toggleable__control:checked~label.sk-toggleable__label { color: var(--sklearn-color-text); background-color: 
var(--sklearn-color-unfitted-level-2); } #sk-container-id-1 div.sk-label.fitted input.sk-toggleable__control:checked~label.sk-toggleable__label { background-color: var(--sklearn-color-fitted-level-2); } /* Estimator-specific style */ /* Colorize estimator box */ #sk-container-id-1 div.sk-estimator input.sk-toggleable__control:checked~label.sk-toggleable__label { /* unfitted */ background-color: var(--sklearn-color-unfitted-level-2); } #sk-container-id-1 div.sk-estimator.fitted input.sk-toggleable__control:checked~label.sk-toggleable__label { /* fitted */ background-color: var(--sklearn-color-fitted-level-2); } #sk-container-id-1 div.sk-label label.sk-toggleable__label, #sk-container-id-1 div.sk-label label { /* The background is the default theme color */ color: var(--sklearn-color-text-on-default-background); } /* On hover, darken the color of the background */ #sk-container-id-1 div.sk-label:hover label.sk-toggleable__label { color: var(--sklearn-color-text); background-color: var(--sklearn-color-unfitted-level-2); } /* Label box, darken color on hover, fitted */ #sk-container-id-1 div.sk-label.fitted:hover label.sk-toggleable__label.fitted { color: var(--sklearn-color-text); background-color: var(--sklearn-color-fitted-level-2); } /* Estimator label */ #sk-container-id-1 div.sk-label label { font-family: monospace; font-weight: bold; display: inline-block; line-height: 1.2em; } #sk-container-id-1 div.sk-label-container { text-align: center; } /* Estimator-specific */ #sk-container-id-1 div.sk-estimator { font-family: monospace; border: 1px dotted var(--sklearn-color-border-box); border-radius: 0.25em; box-sizing: border-box; margin-bottom: 0.5em; /* unfitted */ background-color: var(--sklearn-color-unfitted-level-0); } #sk-container-id-1 div.sk-estimator.fitted { /* fitted */ background-color: var(--sklearn-color-fitted-level-0); } /* on hover */ #sk-container-id-1 div.sk-estimator:hover { /* unfitted */ background-color: var(--sklearn-color-unfitted-level-2); } 
#sk-container-id-1 div.sk-estimator.fitted:hover { /* fitted */ background-color: var(--sklearn-color-fitted-level-2); } /* Specification for estimator info (e.g. "i" and "?") */ /* Common style for "i" and "?" */ .sk-estimator-doc-link, a:link.sk-estimator-doc-link, a:visited.sk-estimator-doc-link { float: right; font-size: smaller; line-height: 1em; font-family: monospace; background-color: var(--sklearn-color-background); border-radius: 1em; height: 1em; width: 1em; text-decoration: none !important; margin-left: 0.5em; text-align: center; /* unfitted */ border: var(--sklearn-color-unfitted-level-1) 1pt solid; color: var(--sklearn-color-unfitted-level-1); } .sk-estimator-doc-link.fitted, a:link.sk-estimator-doc-link.fitted, a:visited.sk-estimator-doc-link.fitted { /* fitted */ border: var(--sklearn-color-fitted-level-1) 1pt solid; color: var(--sklearn-color-fitted-level-1); } /* On hover */ div.sk-estimator:hover .sk-estimator-doc-link:hover, .sk-estimator-doc-link:hover, div.sk-label-container:hover .sk-estimator-doc-link:hover, .sk-estimator-doc-link:hover { /* unfitted */ background-color: var(--sklearn-color-unfitted-level-3); color: var(--sklearn-color-background); text-decoration: none; } div.sk-estimator.fitted:hover .sk-estimator-doc-link.fitted:hover, .sk-estimator-doc-link.fitted:hover, div.sk-label-container:hover .sk-estimator-doc-link.fitted:hover, .sk-estimator-doc-link.fitted:hover { /* fitted */ background-color: var(--sklearn-color-fitted-level-3); color: var(--sklearn-color-background); text-decoration: none; } /* Span, style for the box shown on hovering the info icon */ .sk-estimator-doc-link span { display: none; z-index: 9999; position: relative; font-weight: normal; right: .2ex; padding: .5ex; margin: .5ex; width: min-content; min-width: 20ex; max-width: 50ex; color: var(--sklearn-color-text); box-shadow: 2pt 2pt 4pt #999; /* unfitted */ background: var(--sklearn-color-unfitted-level-0); border: .5pt solid 
var(--sklearn-color-unfitted-level-3); } .sk-estimator-doc-link.fitted span { /* fitted */ background: var(--sklearn-color-fitted-level-0); border: var(--sklearn-color-fitted-level-3); } .sk-estimator-doc-link:hover span { display: block; } /* "?"-specific style due to the `<a>` HTML tag */ #sk-container-id-1 a.estimator_doc_link { float: right; font-size: 1rem; line-height: 1em; font-family: monospace; background-color: var(--sklearn-color-background); border-radius: 1rem; height: 1rem; width: 1rem; text-decoration: none; /* unfitted */ color: var(--sklearn-color-unfitted-level-1); border: var(--sklearn-color-unfitted-level-1) 1pt solid; } #sk-container-id-1 a.estimator_doc_link.fitted { /* fitted */ border: var(--sklearn-color-fitted-level-1) 1pt solid; color: var(--sklearn-color-fitted-level-1); } /* On hover */ #sk-container-id-1 a.estimator_doc_link:hover { /* unfitted */ background-color: var(--sklearn-color-unfitted-level-3); color: var(--sklearn-color-background); text-decoration: none; } #sk-container-id-1 a.estimator_doc_link.fitted:hover { /* fitted */ background-color: var(--sklearn-color-fitted-level-3); } </style><div id="sk-container-id-1" class="sk-top-container"><div class="sk-text-repr-fallback"><pre><GeneralizingEstimator(base_estimator=Pipeline(steps=[('standardscaler', StandardScaler()), ('logisticregression', LogisticRegression(solver='liblinear'))]), scoring='roc_auc', verbose=True, fitted with 35 estimators, fitted with 35 estimators></pre><b>In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. 
<br />On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.</b></div><div class="sk-container" hidden><div class="sk-item sk-dashed-wrapped"><div class="sk-label-container"><div class="sk-label fitted sk-toggleable"><input class="sk-toggleable__control sk-hidden--visually" id="sk-estimator-id-1" type="checkbox" ><label for="sk-estimator-id-1" class="sk-toggleable__label fitted sk-toggleable__label-arrow"><div><div>GeneralizingEstimator</div></div><div><span class="sk-estimator-doc-link fitted">i<span>Fitted</span></span></div></label><div class="sk-toggleable__content fitted"><pre><GeneralizingEstimator(base_estimator=Pipeline(steps=[('standardscaler', StandardScaler()), ('logisticregression', LogisticRegression(solver='liblinear'))]), scoring='roc_auc', verbose=True, fitted with 35 estimators, fitted with 35 estimators></pre></div> </div></div><div class="sk-parallel"><div class="sk-parallel-item"><div class="sk-item"><div class="sk-label-container"><div class="sk-label fitted sk-toggleable"><input class="sk-toggleable__control sk-hidden--visually" id="sk-estimator-id-2" type="checkbox" ><label for="sk-estimator-id-2" class="sk-toggleable__label fitted sk-toggleable__label-arrow"><div><div>base_estimator: Pipeline</div></div></label><div class="sk-toggleable__content fitted"><pre>Pipeline(steps=[('standardscaler', StandardScaler()), ('logisticregression', LogisticRegression(solver='liblinear'))])</pre></div> </div></div><div class="sk-serial"><div class="sk-item"><div class="sk-serial"><div class="sk-item"><div class="sk-estimator fitted sk-toggleable"><input class="sk-toggleable__control sk-hidden--visually" id="sk-estimator-id-3" type="checkbox" ><label for="sk-estimator-id-3" class="sk-toggleable__label fitted sk-toggleable__label-arrow"><div><div>StandardScaler</div></div><div><a class="sk-estimator-doc-link fitted" rel="noreferrer" target="_blank" 
href="https://scikit-learn.org/1.6/modules/generated/sklearn.preprocessing.StandardScaler.html">?<span>Documentation for StandardScaler</span></a></div></label><div class="sk-toggleable__content fitted"><pre>StandardScaler()</pre></div> </div></div><div class="sk-item"><div class="sk-estimator fitted sk-toggleable"><input class="sk-toggleable__control sk-hidden--visually" id="sk-estimator-id-4" type="checkbox" ><label for="sk-estimator-id-4" class="sk-toggleable__label fitted sk-toggleable__label-arrow"><div><div>LogisticRegression</div></div><div><a class="sk-estimator-doc-link fitted" rel="noreferrer" target="_blank" href="https://scikit-learn.org/1.6/modules/generated/sklearn.linear_model.LogisticRegression.html">?<span>Documentation for LogisticRegression</span></a></div></label><div class="sk-toggleable__content fitted"><pre>LogisticRegression(solver='liblinear')</pre></div> </div></div></div></div></div></div></div></div></div></div></div> </div> <br /> <br /><p>Score on the epochs where the stimulus was presented to the right.</p> <div class="highlight-Python notranslate"><div class="highlight"><pre><span></span><a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">scores</span></a> <span class="o">=</span> <a href="../../generated/mne.decoding.GeneralizingEstimator.html#mne.decoding.GeneralizingEstimator.score" title="mne.decoding.GeneralizingEstimator.score" class="sphx-glr-backref-module-mne-decoding sphx-glr-backref-type-py-method"><span class="n">time_gen</span><span class="o">.</span><span class="n">score</span></a><span class="p">(</span> <span class="n">X</span><span class="o">=</span><a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span 
class="n">epochs</span></a><span class="p">[</span><span class="s2">"Right"</span><span class="p">]</span><span class="o">.</span><span class="n">get_data</span><span class="p">(</span><span class="n">copy</span><span class="o">=</span><span class="kc">False</span><span class="p">),</span> <span class="n">y</span><span class="o">=</span><a href="../../generated/mne.Epochs.html#mne.Epochs" title="mne.Epochs" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">epochs</span></a><span class="p">[</span><span class="s2">"Right"</span><span class="p">]</span><span class="o">.</span><a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">events</span></a><span class="p">[:,</span> <span class="mi">2</span><span class="p">]</span> <span class="o">></span> <span class="mi">2</span> <span class="p">)</span> </pre></div> </div> <div class="sphx-glr-script-out highlight-none notranslate"><div class="highlight"><pre><span></span> 0%| | Scoring GeneralizingEstimator : 0/1225 [00:00<?, ?it/s] 1%| | Scoring GeneralizingEstimator : 11/1225 [00:00<00:03, 310.34it/s] 2%|▏ | Scoring GeneralizingEstimator : 21/1225 [00:00<00:03, 303.28it/s] 3%|▎ | Scoring GeneralizingEstimator : 33/1225 [00:00<00:03, 321.19it/s] 4%|▎ | Scoring GeneralizingEstimator : 45/1225 [00:00<00:03, 329.83it/s] 5%|▍ | Scoring GeneralizingEstimator : 56/1225 [00:00<00:03, 328.94it/s] 5%|▌ | Scoring GeneralizingEstimator : 67/1225 [00:00<00:03, 323.72it/s] 6%|▋ | Scoring GeneralizingEstimator : 79/1225 [00:00<00:03, 328.35it/s] 7%|▋ | Scoring GeneralizingEstimator : 90/1225 [00:00<00:03, 328.11it/s] 8%|▊ | Scoring GeneralizingEstimator : 102/1225 [00:00<00:03, 331.86it/s] 9%|▉ | Scoring GeneralizingEstimator : 113/1225 [00:00<00:03, 330.79it/s] 10%|█ | Scoring GeneralizingEstimator : 125/1225 
[00:00<00:03, 333.57it/s] 11%|█ | Scoring GeneralizingEstimator : 135/1225 [00:00<00:03, 329.60it/s] 12%|█▏ | Scoring GeneralizingEstimator : 147/1225 [00:00<00:03, 331.09it/s] 13%|█▎ | Scoring GeneralizingEstimator : 159/1225 [00:00<00:03, 333.28it/s] 14%|█▍ | Scoring GeneralizingEstimator : 170/1225 [00:00<00:03, 332.57it/s] 15%|█▍ | Scoring GeneralizingEstimator : 181/1225 [00:00<00:03, 331.87it/s] 16%|█▌ | Scoring GeneralizingEstimator : 193/1225 [00:00<00:03, 333.97it/s] 17%|█▋ | Scoring GeneralizingEstimator : 204/1225 [00:00<00:03, 332.50it/s] 18%|█▊ | Scoring GeneralizingEstimator : 215/1225 [00:00<00:03, 331.94it/s] 19%|█▊ | Scoring GeneralizingEstimator : 227/1225 [00:00<00:02, 333.84it/s] 19%|█▉ | Scoring GeneralizingEstimator : 238/1225 [00:00<00:02, 333.30it/s] 20%|██ | Scoring GeneralizingEstimator : 250/1225 [00:00<00:02, 333.32it/s] 21%|██▏ | Scoring GeneralizingEstimator : 262/1225 [00:00<00:02, 334.99it/s] 22%|██▏ | Scoring GeneralizingEstimator : 273/1225 [00:00<00:02, 334.39it/s] 23%|██▎ | Scoring GeneralizingEstimator : 285/1225 [00:00<00:02, 335.89it/s] 24%|██▍ | Scoring GeneralizingEstimator : 297/1225 [00:00<00:02, 337.20it/s] 25%|██▌ | Scoring GeneralizingEstimator : 307/1225 [00:00<00:02, 334.55it/s] 26%|██▌ | Scoring GeneralizingEstimator : 319/1225 [00:00<00:02, 335.82it/s] 27%|██▋ | Scoring GeneralizingEstimator : 331/1225 [00:00<00:02, 336.91it/s] 28%|██▊ | Scoring GeneralizingEstimator : 343/1225 [00:01<00:02, 336.27it/s] 29%|██▉ | Scoring GeneralizingEstimator : 356/1225 [00:01<00:02, 339.12it/s] 30%|███ | Scoring GeneralizingEstimator : 368/1225 [00:01<00:02, 340.11it/s] 31%|███ | Scoring GeneralizingEstimator : 380/1225 [00:01<00:02, 340.94it/s] 32%|███▏ | Scoring GeneralizingEstimator : 392/1225 [00:01<00:02, 341.74it/s] 33%|███▎ | Scoring GeneralizingEstimator : 405/1225 [00:01<00:02, 344.38it/s] 34%|███▍ | Scoring GeneralizingEstimator : 418/1225 [00:01<00:02, 346.81it/s] 35%|███▌ | Scoring GeneralizingEstimator : 432/1225 
[00:01<00:02, 350.71it/s] 36%|███▋ | Scoring GeneralizingEstimator : 446/1225 [00:01<00:02, 354.50it/s] 38%|███▊ | Scoring GeneralizingEstimator : 460/1225 [00:01<00:02, 357.57it/s] 39%|███▊ | Scoring GeneralizingEstimator : 473/1225 [00:01<00:02, 359.20it/s] 40%|███▉ | Scoring GeneralizingEstimator : 487/1225 [00:01<00:02, 362.40it/s] 41%|████ | Scoring GeneralizingEstimator : 501/1225 [00:01<00:01, 364.15it/s] 42%|████▏ | Scoring GeneralizingEstimator : 515/1225 [00:01<00:01, 367.04it/s] 43%|████▎ | Scoring GeneralizingEstimator : 527/1225 [00:01<00:01, 366.44it/s] 44%|████▍ | Scoring GeneralizingEstimator : 539/1225 [00:01<00:01, 365.88it/s] 45%|████▍ | Scoring GeneralizingEstimator : 547/1225 [00:01<00:01, 358.88it/s] 45%|████▌ | Scoring GeneralizingEstimator : 557/1225 [00:01<00:01, 349.42it/s] 46%|████▌ | Scoring GeneralizingEstimator : 566/1225 [00:01<00:01, 344.99it/s] 48%|████▊ | Scoring GeneralizingEstimator : 583/1225 [00:01<00:01, 345.57it/s] 49%|████▊ | Scoring GeneralizingEstimator : 595/1225 [00:01<00:01, 345.92it/s] 49%|████▉ | Scoring GeneralizingEstimator : 602/1225 [00:01<00:01, 338.49it/s] 50%|█████ | Scoring GeneralizingEstimator : 614/1225 [00:01<00:01, 339.08it/s] 51%|█████▏ | Scoring GeneralizingEstimator : 628/1225 [00:01<00:01, 342.16it/s] 52%|█████▏ | Scoring GeneralizingEstimator : 641/1225 [00:01<00:01, 344.42it/s] 53%|█████▎ | Scoring GeneralizingEstimator : 654/1225 [00:01<00:01, 346.44it/s] 54%|█████▍ | Scoring GeneralizingEstimator : 666/1225 [00:01<00:01, 346.92it/s] 55%|█████▌ | Scoring GeneralizingEstimator : 678/1225 [00:01<00:01, 347.39it/s] 56%|█████▋ | Scoring GeneralizingEstimator : 690/1225 [00:02<00:01, 347.87it/s] 57%|█████▋ | Scoring GeneralizingEstimator : 703/1225 [00:02<00:01, 349.84it/s] 58%|█████▊ | Scoring GeneralizingEstimator : 716/1225 [00:02<00:01, 350.37it/s] 59%|█████▉ | Scoring GeneralizingEstimator : 728/1225 [00:02<00:01, 350.67it/s] 60%|██████ | Scoring GeneralizingEstimator : 741/1225 [00:02<00:01, 
351.30it/s] 61%|██████▏ | Scoring GeneralizingEstimator : 753/1225 [00:02<00:01, 351.51it/s] 62%|██████▏ | Scoring GeneralizingEstimator : 765/1225 [00:02<00:01, 351.69it/s] 63%|██████▎ | Scoring GeneralizingEstimator : 776/1225 [00:02<00:01, 350.41it/s] 64%|██████▍ | Scoring GeneralizingEstimator : 789/1225 [00:02<00:01, 352.13it/s] 65%|██████▌ | Scoring GeneralizingEstimator : 802/1225 [00:02<00:01, 353.78it/s] 67%|██████▋ | Scoring GeneralizingEstimator : 815/1225 [00:02<00:01, 355.38it/s] 68%|██████▊ | Scoring GeneralizingEstimator : 829/1225 [00:02<00:01, 358.45it/s] 69%|██████▊ | Scoring GeneralizingEstimator : 842/1225 [00:02<00:01, 359.82it/s] 70%|██████▉ | Scoring GeneralizingEstimator : 856/1225 [00:02<00:01, 361.28it/s] 71%|███████ | Scoring GeneralizingEstimator : 870/1225 [00:02<00:00, 363.82it/s] 72%|███████▏ | Scoring GeneralizingEstimator : 883/1225 [00:02<00:00, 364.86it/s] 73%|███████▎ | Scoring GeneralizingEstimator : 896/1225 [00:02<00:00, 365.93it/s] 74%|███████▍ | Scoring GeneralizingEstimator : 909/1225 [00:02<00:00, 366.94it/s] 75%|███████▌ | Scoring GeneralizingEstimator : 922/1225 [00:02<00:00, 367.88it/s] 76%|███████▋ | Scoring GeneralizingEstimator : 936/1225 [00:02<00:00, 370.19it/s] 77%|███████▋ | Scoring GeneralizingEstimator : 949/1225 [00:02<00:00, 370.98it/s] 79%|███████▊ | Scoring GeneralizingEstimator : 963/1225 [00:02<00:00, 373.08it/s] 80%|███████▉ | Scoring GeneralizingEstimator : 977/1225 [00:02<00:00, 375.23it/s] 81%|████████ | Scoring GeneralizingEstimator : 991/1225 [00:02<00:00, 375.74it/s] 82%|████████▏ | Scoring GeneralizingEstimator : 1004/1225 [00:02<00:00, 376.13it/s] 83%|████████▎ | Scoring GeneralizingEstimator : 1017/1225 [00:02<00:00, 376.65it/s] 84%|████████▍ | Scoring GeneralizingEstimator : 1030/1225 [00:02<00:00, 377.10it/s] 85%|████████▌ | Scoring GeneralizingEstimator : 1043/1225 [00:02<00:00, 377.56it/s] 86%|████████▌ | Scoring GeneralizingEstimator : 1056/1225 [00:02<00:00, 377.97it/s] 87%|████████▋ | 
Scoring GeneralizingEstimator : 1070/1225 [00:02<00:00, 379.88it/s] 88%|████████▊ | Scoring GeneralizingEstimator : 1083/1225 [00:03<00:00, 380.22it/s] 89%|████████▉ | Scoring GeneralizingEstimator : 1096/1225 [00:03<00:00, 380.51it/s] 91%|█████████ | Scoring GeneralizingEstimator : 1110/1225 [00:03<00:00, 382.29it/s] 92%|█████████▏| Scoring GeneralizingEstimator : 1124/1225 [00:03<00:00, 382.92it/s] 93%|█████████▎| Scoring GeneralizingEstimator : 1137/1225 [00:03<00:00, 383.09it/s] 94%|█████████▍| Scoring GeneralizingEstimator : 1151/1225 [00:03<00:00, 384.69it/s] 95%|█████████▌| Scoring GeneralizingEstimator : 1164/1225 [00:03<00:00, 384.68it/s] 96%|█████████▌| Scoring GeneralizingEstimator : 1177/1225 [00:03<00:00, 384.60it/s] 97%|█████████▋| Scoring GeneralizingEstimator : 1190/1225 [00:03<00:00, 384.66it/s] 98%|█████████▊| Scoring GeneralizingEstimator : 1204/1225 [00:03<00:00, 386.16it/s] 99%|█████████▉| Scoring GeneralizingEstimator : 1218/1225 [00:03<00:00, 386.03it/s] 100%|██████████| Scoring GeneralizingEstimator : 1225/1225 [00:03<00:00, 387.09it/s] 100%|██████████| Scoring GeneralizingEstimator : 1225/1225 [00:03<00:00, 362.36it/s] </pre></div> </div> <p>Plot</p> <div class="highlight-Python notranslate"><div class="highlight"><pre><span></span><a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.figure.Figure.html#matplotlib.figure.Figure" title="matplotlib.figure.Figure" class="sphx-glr-backref-module-matplotlib-figure sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">fig</span></a><span class="p">,</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.html#matplotlib.axes.Axes" title="matplotlib.axes.Axes" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">ax</span></a> <span class="o">=</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.subplots.html#matplotlib.pyplot.subplots" 
title="matplotlib.pyplot.subplots" class="sphx-glr-backref-module-matplotlib-pyplot sphx-glr-backref-type-py-function"><span class="n">plt</span><span class="o">.</span><span class="n">subplots</span></a><span class="p">(</span><span class="n">layout</span><span class="o">=</span><span class="s2">"constrained"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/image_api.html#matplotlib.image.AxesImage" title="matplotlib.image.AxesImage" class="sphx-glr-backref-module-matplotlib-image sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">im</span></a> <span class="o">=</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.matshow.html#matplotlib.axes.Axes.matshow" title="matplotlib.axes.Axes.matshow" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">matshow</span></a><span class="p">(</span> <a href="https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html#numpy.ndarray" title="numpy.ndarray" class="sphx-glr-backref-module-numpy sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">scores</span></a><span class="p">,</span> <span class="n">vmin</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">vmax</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">cmap</span><span class="o">=</span><span class="s2">"RdBu_r"</span><span class="p">,</span> <span class="n">origin</span><span class="o">=</span><span class="s2">"lower"</span><span class="p">,</span> <span class="n">extent</span><span class="o">=</span><a href="../../generated/mne.Epochs.html#mne.Epochs.times" title="mne.Epochs.times" class="sphx-glr-backref-module-mne sphx-glr-backref-type-py-property"><span class="n">epochs</span><span class="o">.</span><span class="n">times</span></a><span class="p">[[</span><span 
class="mi">0</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">]],</span> <span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.axhline.html#matplotlib.axes.Axes.axhline" title="matplotlib.axes.Axes.axhline" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">axhline</span></a><span class="p">(</span><span class="mf">0.0</span><span class="p">,</span> <span class="n">color</span><span class="o">=</span><span class="s2">"k"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.axvline.html#matplotlib.axes.Axes.axvline" title="matplotlib.axes.Axes.axvline" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">axvline</span></a><span class="p">(</span><span class="mf">0.0</span><span class="p">,</span> <span class="n">color</span><span class="o">=</span><span class="s2">"k"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axis.XAxis.set_ticks_position.html#matplotlib.axis.XAxis.set_ticks_position" title="matplotlib.axis.XAxis.set_ticks_position" class="sphx-glr-backref-module-matplotlib-axis sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">xaxis</span><span class="o">.</span><span class="n">set_ticks_position</span></a><span class="p">(</span><span class="s2">"bottom"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_xlabel.html#matplotlib.axes.Axes.set_xlabel" title="matplotlib.axes.Axes.set_xlabel" class="sphx-glr-backref-module-matplotlib-axes 
sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">set_xlabel</span></a><span class="p">(</span> <span class="s1">'Condition: "Right"</span><span class="se">\n</span><span class="s1">Testing Time (s)'</span><span class="p">,</span> <span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_ylabel.html#matplotlib.axes.Axes.set_ylabel" title="matplotlib.axes.Axes.set_ylabel" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">set_ylabel</span></a><span class="p">(</span><span class="s1">'Condition: "Left"</span><span class="se">\n</span><span class="s1">Training Time (s)'</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.set_title.html#matplotlib.axes.Axes.set_title" title="matplotlib.axes.Axes.set_title" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-method"><span class="n">ax</span><span class="o">.</span><span class="n">set_title</span></a><span class="p">(</span><span class="s2">"Generalization across time and condition"</span><span class="p">,</span> <span class="n">fontweight</span><span class="o">=</span><span class="s2">"bold"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.figure.Figure.colorbar.html#matplotlib.figure.Figure.colorbar" title="matplotlib.figure.Figure.colorbar" class="sphx-glr-backref-module-matplotlib-figure sphx-glr-backref-type-py-method"><span class="n">fig</span><span class="o">.</span><span class="n">colorbar</span></a><span class="p">(</span><a href="https://matplotlib.org/stable/api/image_api.html#matplotlib.image.AxesImage" title="matplotlib.image.AxesImage" class="sphx-glr-backref-module-matplotlib-image sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">im</span></a><span class="p">,</span> <a 
href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.html#matplotlib.axes.Axes" title="matplotlib.axes.Axes" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">ax</span></a><span class="o">=</span><a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.axes.Axes.html#matplotlib.axes.Axes" title="matplotlib.axes.Axes" class="sphx-glr-backref-module-matplotlib-axes sphx-glr-backref-type-py-class sphx-glr-backref-instance"><span class="n">ax</span></a><span class="p">,</span> <span class="n">label</span><span class="o">=</span><span class="s2">"Performance (ROC AUC)"</span><span class="p">)</span> <a href="https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.show.html#matplotlib.pyplot.show" title="matplotlib.pyplot.show" class="sphx-glr-backref-module-matplotlib-pyplot sphx-glr-backref-type-py-function"><span class="n">plt</span><span class="o">.</span><span class="n">show</span></a><span class="p">()</span> </pre></div> </div> <img src="../../_images/sphx_glr_decoding_time_generalization_conditions_001.png" srcset="../../_images/sphx_glr_decoding_time_generalization_conditions_001.png" alt="Generalization across time and condition" class = "sphx-glr-single-img"/><section id="references"> <h2>References<a class="headerlink" href="#references" title="Link to this heading">#</a></h2> <div class="docutils container" id="id4"> <aside class="footnote-list brackets"> <aside class="footnote brackets" id="footcite-kingdehaene2014" role="doc-footnote"> <span class="label"><span class="fn-bracket">[</span><a role="doc-backlink" href="#id3">1</a><span class="fn-bracket">]</span></span> <p>Jean-Rémi King and Stanislas Dehaene. Characterizing the dynamics of mental representations: the temporal generalization method. <em>Trends in Cognitive Sciences</em>, 18(4):203–210, 2014. 
<a class="reference external" href="https://doi.org/10.1016/j.tics.2014.01.002">doi:10.1016/j.tics.2014.01.002</a>.</p> </aside> </aside> </div> <p class="sphx-glr-timing"><strong>Total running time of the script:</strong> (0 minutes 5.810 seconds)</p> <div class="sphx-glr-footer sphx-glr-footer-example docutils container" id="sphx-glr-download-auto-examples-decoding-decoding-time-generalization-conditions-py"> <div class="sphx-glr-download sphx-glr-download-jupyter docutils container"> <p><a class="reference download internal" download="" href="../../_downloads/00e78bba5d10188fcf003ef05e32a6f7/decoding_time_generalization_conditions.ipynb"><code class="xref download docutils literal notranslate"><span class="pre">Download</span> <span class="pre">Jupyter</span> <span class="pre">notebook:</span> <span class="pre">decoding_time_generalization_conditions.ipynb</span></code></a></p> </div> <div class="sphx-glr-download sphx-glr-download-python docutils container"> <p><a class="reference download internal" download="" href="../../_downloads/ab6282967a162922fd7405f0d8568e07/decoding_time_generalization_conditions.py"><code class="xref download docutils literal notranslate"><span class="pre">Download</span> <span class="pre">Python</span> <span class="pre">source</span> <span class="pre">code:</span> <span class="pre">decoding_time_generalization_conditions.py</span></code></a></p> </div> <div class="sphx-glr-download sphx-glr-download-zip docutils container"> <p><a class="reference download internal" download="" href="../../_downloads/4726ce22836f018022970d7eff07f389/decoding_time_generalization_conditions.zip"><code class="xref download docutils literal notranslate"><span class="pre">Download</span> <span class="pre">zipped:</span> <span class="pre">decoding_time_generalization_conditions.zip</span></code></a></p> </div> </div> <p class="sphx-glr-signature"><a class="reference external" href="https://sphinx-gallery.github.io">Gallery generated by 
Sphinx-Gallery</a></p> </section> </section> </div> <footer class="prev-next-footer d-print-none"> <div class="prev-next-area"> <a class="left-prev" href="decoding_spoc_CMC.html" title="previous page"> <i class="fa-solid fa-angle-left"></i> <div class="prev-next-info"> <p class="prev-next-subtitle">previous</p> <p class="prev-next-title">Continuous Target Decoding with SPoC</p> </div> </a> <a class="right-next" href="decoding_unsupervised_spatial_filter.html" title="next page"> <div class="prev-next-info"> <p class="prev-next-subtitle">next</p> <p class="prev-next-title">Analysis of evoked response using ICA and PCA reduction techniques</p> </div> <i class="fa-solid fa-angle-right"></i> </a> </div> </footer> </div> <dialog id="pst-secondary-sidebar-modal"></dialog> <div id="pst-secondary-sidebar" class="bd-sidebar-secondary bd-toc"><div class="sidebar-secondary-items sidebar-secondary__inner"> <div class="sidebar-secondary-item"> <div id="pst-page-navigation-heading-2" class="page-toc tocsection onthispage"> <i class="fa-solid fa-list"></i> On this page </div> <nav class="bd-toc-nav page-toc" aria-labelledby="pst-page-navigation-heading-2"> <ul class="visible nav section-nav flex-column"> <li class="toc-h2 nav-item toc-entry"><a class="reference internal nav-link" href="#references">References</a></li> </ul> </nav></div> </div></div> </div> <footer class="bd-footer-content"> </footer> </main> </div> </div> <!-- Scripts loaded after <body> so the DOM is not blocked --> <script defer src="../../_static/scripts/bootstrap.js?digest=8878045cc6db502f8baf"></script> <script defer src="../../_static/scripts/pydata-sphinx-theme.js?digest=8878045cc6db502f8baf"></script> <script src="https://mne.tools/versionwarning.js"></script> <footer class="bd-footer"> <div class="bd-footer__inner bd-page-width"> <div class="footer-items__start"> <div class="footer-item"><p class="text-center small">© Copyright 2012–2025, MNE Developers. 
Last updated <time datetime="2025-04-02T06:37:30.272415+00:00" class="localized">2025-04-02 06:37 UTC</time> <!-- Localize the UTC timestamp to the reader's locale/timezone. Rewritten in vanilla DOM APIs: the page's <head> loads no jQuery, so the previous $(...) call would throw "ReferenceError: $ is not defined" and leave the raw UTC text in place. --> <script>document.addEventListener("DOMContentLoaded", function () { document.querySelectorAll("time.localized").forEach(function (el) { el.textContent = new Date(el.getAttribute("datetime")).toLocaleString([], {dateStyle: "medium", timeStyle: "long"}); }); });</script></p></div> </div> <div class="footer-items__end"> <div class="footer-item"> <p class="theme-version"> <!-- # L10n: Setting the PST URL as an argument as this does not need to be localized --> Built with the <a href="https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html">PyData Sphinx Theme</a> 0.16.1. </p></div> </div> </div> </footer> </body> </html>