How to use the mriqc.config.workflow settings in mriqc

To help you get started, we’ve selected a few mriqc examples, based on popular ways it is used in public projects.

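The excerpts below all read (or populate) settings on mriqc's config module, most often config.workflow and config.execution. For experimenting outside a full run, mriqc's own docstring examples (reproduced in the excerpts) use mriqc.testing.mock_config to stand up a working configuration; the sketch below builds on that pattern, and the assigned values are illustrative assumptions rather than mriqc defaults.

# Minimal sketch: reading and overriding config.workflow settings under a
# mocked configuration. mock_config is the helper used in mriqc's docstring
# examples; the values assigned here are illustrative assumptions.
from mriqc import config
from mriqc.testing import mock_config

with mock_config():
    print(config.workflow.fd_thres)             # framewise-displacement threshold
    config.workflow.despike = True              # toggle the despiking branch (see functional.py below)
    config.workflow.fft_spikes_detector = True  # enable the FFT-based spikes plot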

poldracklab/mriqc · mriqc/workflows/anatomical.py (view on GitHub)
    # Harmonize
    homog = pe.Node(Harmonize(), name='harmonize')

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(), name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), 'measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='Linear',
        float=True),
        iterfield=['input_image'], name='MNItpms2t1')
    invt.inputs.input_image = [str(p) for p in get_template(
        config.workflow.template_id, suffix='probseg', resolution=1,
        label=['CSF', 'GM', 'WM'])]

    datasink = pe.Node(IQMFileSink(
        out_dir=config.execution.output_dir,
        dataset=config.execution.dsname),
        name='datasink', run_without_submitting=True)

    def _getwm(inlist):
        return inlist[-1]

    workflow.connect([
        (inputnode, meta, [('in_file', 'in_file')]),
        (inputnode, datasink, [('in_file', 'in_file'),
                               (('in_file', _get_mod), 'modality')]),
        (inputnode, addprov, [(('in_file', _get_mod), 'modality')]),
        (meta, datasink, [('subject', 'subject_id'),
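In the excerpt above, config.workflow.template_id selects the TemplateFlow template whose tissue probability maps (CSF, GM, WM) are projected into T1w space, and config.execution.output_dir and dsname steer where the IQMs are written. A hedged, stand-alone sketch of that template lookup, assuming templateflow is installed and using 'MNI152NLin2009cAsym' only as an example identifier:

# Sketch: the same TemplateFlow query issued outside the workflow. The
# template identifier is an assumption; inside a configured mriqc run it
# comes from config.workflow.template_id.
from templateflow.api import get_template

tpms = [str(p) for p in get_template(
    'MNI152NLin2009cAsym', suffix='probseg', resolution=1,
    label=['CSF', 'GM', 'WM'])]
print(tpms)  # three probability-map paths, one per tissue class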
poldracklab/mriqc · mriqc/workflows/functional.py (view on GitHub)
        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, gen_ref, [('out_file', 'in_file')]),
            (st_corr, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.despike and config.workflow.deoblique:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.despike:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, gen_ref, [('out_file', 'in_file')]),
            (despike_node, hmc, [('out_file', 'in_file')]),
        ])

    elif config.workflow.deoblique:

        workflow.connect([
            (drop_trs, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    else:
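The branches above are selected purely by two booleans, config.workflow.despike and config.workflow.deoblique; only the wiring between drop_trs, gen_ref and hmc changes. A hedged sketch of flipping those switches before a functional workflow is built (the values are illustrative, not defaults):

# Sketch: the despike/deoblique branches shown above are gated by two
# config.workflow booleans; set them under a mocked configuration.
from mriqc import config
from mriqc.testing import mock_config

with mock_config():
    config.workflow.despike = True     # route data through the despiking node
    config.workflow.deoblique = False  # skip the deobliquing node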
poldracklab/mriqc · mriqc/workflows/functional.py (view on GitHub)
    Encapsulates nodes writing plots

    .. workflow::

        from mriqc.workflows.functional import individual_reports
        from mriqc.testing import mock_config
        with mock_config():
            wf = individual_reports()

    """
    from niworkflows.interfaces.plotting import FMRISummary
    from ..interfaces import PlotMosaic, PlotSpikes
    from ..interfaces.reports import IndividualReport

    verbose = config.execution.verbose_reports
    mem_gb = config.workflow.biggest_file_gb

    pages = 5
    extra_pages = int(verbose) * 4

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_iqms', 'in_ras', 'hmc_epi', 'epi_mean', 'brainmask', 'hmc_fd', 'fd_thres', 'epi_parc',
        'in_dvars', 'in_stddev', 'outliers', 'in_spikes', 'in_fft',
        'mni_report', 'ica_report']),
        name='inputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    spmask = pe.Node(niu.Function(
        input_names=['in_file', 'in_mask'], output_names=['out_file', 'out_plot'],
poldracklab/mriqc · mriqc/workflows/functional.py (view on GitHub)
    ])

    if config.workflow.fft_spikes_detector:
        mosaic_spikes = pe.Node(PlotSpikes(
            out_file='plot_spikes.svg', cmap='viridis',
            title='High-Frequency spikes'),
            name='PlotSpikes')

        workflow.connect([
            (inputnode, mosaic_spikes, [('in_ras', 'in_file'),
                                        ('in_spikes', 'in_spikes'),
                                        ('in_fft', 'in_fft')]),
            (mosaic_spikes, mplots, [('out_file', 'in4')])
        ])

    if config.workflow.ica:
        page_number = 4 + config.workflow.fft_spikes_detector
        workflow.connect([
            (inputnode, mplots, [('ica_report', 'in%d' % page_number)])
        ])

    if not verbose:
        return workflow

    mosaic_zoom = pe.Node(PlotMosaic(
        out_file='plot_anat_mosaic1_zoomed.svg',
        cmap='Greys_r'), name='PlotMosaicZoomed')

    mosaic_noise = pe.Node(PlotMosaic(
        out_file='plot_anat_mosaic2_noise.svg',
        only_noise=True, cmap='viridis_r'), name='PlotMosaicNoise')
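As in the docstring at the top of this excerpt, the reports sub-workflow can be instantiated under mock_config. A hedged sketch that also switches on the optional plots gated by config.workflow.fft_spikes_detector and config.execution.verbose_reports (both toggles are set here only for illustration):

# Sketch: build individual_reports() with the optional pages enabled.
from mriqc import config
from mriqc.testing import mock_config
from mriqc.workflows.functional import individual_reports

with mock_config():
    config.workflow.fft_spikes_detector = True  # adds the high-frequency spikes mosaic
    config.execution.verbose_reports = True     # adds the zoomed and noise-enhanced mosaics
    wf = individual_reports()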
poldracklab/mriqc · mriqc/cli/parser.py (view on GitHub)
    all_subjects = config.execution.layout.get_subjects()
    if config.execution.participant_label is None:
        config.execution.participant_label = all_subjects

    participant_label = set(config.execution.participant_label)
    missing_subjects = participant_label - set(all_subjects)
    if missing_subjects:
        parser.error(
            "One or more participant labels were not found in the BIDS directory: "
            f"{', '.join(missing_subjects)}."
        )

    config.execution.participant_label = sorted(participant_label)

    # Handle analysis_level
    analysis_level = set(config.workflow.analysis_level)
    if not config.execution.participant_label:
        analysis_level.add("group")
    config.workflow.analysis_level = list(analysis_level)

    # List of files to be run
    bids_filters = {
        "participant_label": config.execution.participant_label,
        "session": config.execution.session_id,
        "run": config.execution.run_id,
        "task": config.execution.task_id,
        "bids_type": config.execution.modalities,
    }
    config.workflow.inputs = {
        mod: files
        for mod, files in collect_bids_data(
            config.execution.layout, **bids_filters
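The participant handling above is plain set arithmetic over the labels found in the BIDS layout: missing labels trigger parser.error, the remainder is sorted back into config.execution.participant_label, and an empty selection adds "group" to config.workflow.analysis_level. A stand-alone illustration with made-up labels (no BIDS dataset required):

# Sketch of the validation above with stand-in values; any requested label
# absent from the dataset ends up in missing_subjects.
all_subjects = ["01", "02", "03"]
participant_label = {"01", "04"}
missing_subjects = participant_label - set(all_subjects)
print(missing_subjects)           # {'04'} -> would trigger parser.error(...)
print(sorted(participant_label))  # ['01', '04'] -> stored back into the config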
poldracklab/mriqc · mriqc/workflows/functional.py (view on GitHub)
        base_directory=str(config.execution.output_dir),
        parameterization=False),
        name='dsplots', run_without_submitting=True)

    workflow.connect([
        (inputnode, rnode, [('in_iqms', 'in_iqms')]),
        (inputnode, mosaic_mean, [('epi_mean', 'in_file')]),
        (inputnode, mosaic_stddev, [('in_stddev', 'in_file')]),
        (mosaic_mean, mplots, [('out_file', 'in1')]),
        (mosaic_stddev, mplots, [('out_file', 'in2')]),
        (bigplot, mplots, [('out_file', 'in3')]),
        (mplots, rnode, [('out', 'in_plots')]),
        (rnode, dsplots, [('out_file', '@html_report')]),
    ])

    if config.workflow.fft_spikes_detector:
        mosaic_spikes = pe.Node(PlotSpikes(
            out_file='plot_spikes.svg', cmap='viridis',
            title='High-Frequency spikes'),
            name='PlotSpikes')

        workflow.connect([
            (inputnode, mosaic_spikes, [('in_ras', 'in_file'),
                                        ('in_spikes', 'in_spikes'),
                                        ('in_fft', 'in_fft')]),
            (mosaic_spikes, mplots, [('out_file', 'in4')])
        ])

    if config.workflow.ica:
        page_number = 4 + config.workflow.fft_spikes_detector
        workflow.connect([
            (inputnode, mplots, [('ica_report', 'in%d' % page_number)])
poldracklab/mriqc · mriqc/workflows/anatomical.py (view on GitHub)
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images

    .. workflow::

        import os.path as op
        from mriqc.workflows.anatomical import anat_qc_workflow
        from mriqc.testing import mock_config
        with mock_config():
            wf = anat_qc_workflow()

    """
    from niworkflows.anat.skullstrip import afni_wf as skullstrip_wf

    dataset = config.workflow.inputs.get("T1w", []) \
        + config.workflow.inputs.get("T2w", [])

    config.loggers.workflow.info(f"""\
Building anatomical MRIQC workflow for files: {', '.join(dataset)}.""")

    # Initialize workflow
    workflow = pe.Workflow(name=name)

    # Define workflow, inputs and outputs
    # 0. Get data
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
    inputnode.iterables = [('in_file', dataset)]

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']), name='outputnode')

    # 1. Reorient anatomical image
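config.workflow.inputs is the modality-to-files mapping assembled by the CLI parser (see the parser.py excerpt above); the anatomical workflow concatenates its T1w and T2w entries and iterates the pipeline over the resulting list. A hedged sketch of that shape with hypothetical file paths:

# Sketch: the structure of config.workflow.inputs assumed by the excerpt
# above. The paths are hypothetical placeholders.
inputs = {
    "T1w": ["sub-01/anat/sub-01_T1w.nii.gz"],
    "T2w": [],  # empty when no T2w images were found
}
dataset = inputs.get("T1w", []) + inputs.get("T2w", [])
# inputnode.iterables = [('in_file', dataset)] -> one run of the pipeline per file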
poldracklab/mriqc · mriqc/interfaces/reports.py (view on GitHub)
        if self.inputs.modality in ("T1w", "T2w"):
            air_msk_size = (
                np.asanyarray(nb.load(self.inputs.air_msk).dataobj).astype(bool).sum()
            )
            rot_msk_size = (
                np.asanyarray(nb.load(self.inputs.rot_msk).dataobj).astype(bool).sum()
            )
            self._results["out_prov"]["warnings"] = {
                "small_air_mask": bool(air_msk_size < 5e5),
                "large_rot_frame": bool(rot_msk_size > 500),
            }

        if self.inputs.modality == "bold":
            self._results["out_prov"]["settings"].update(
                {
                    "fd_thres": config.workflow.fd_thres,
                }
            )

        return runtime
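The provenance warnings above are simple threshold checks on mask sizes (in voxels), and the BOLD branch records the config.workflow.fd_thres setting alongside them. A small illustration of the anatomical checks with made-up sizes:

# Sketch of the warning flags above, using made-up mask sizes (voxel counts).
air_msk_size = 4.2e5  # hypothetical air-mask size
rot_msk_size = 812    # hypothetical rotation-mask size
warnings = {
    "small_air_mask": bool(air_msk_size < 5e5),   # True: air mask smaller than expected
    "large_rot_frame": bool(rot_msk_size > 500),  # True: rotation mask larger than expected
}
print(warnings)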
poldracklab/mriqc · mriqc/workflows/functional.py (view on GitHub)
    from .utils import _tofloat
    from ..interfaces.transitional import GCOR

    mem_gb = config.workflow.biggest_file_gb

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_ras',
        'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd', 'fd_thres', 'in_tsnr', 'metadata',
        'exclude_index']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
        name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = config.workflow.fd_thres

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True), name='ComputeDVARS',
                     mem_gb=mem_gb * 3)

    # AFNI quality measures
    fwhm_interface = get_fwhmx()
    fwhm = pe.Node(fwhm_interface, name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers', mem_gb=mem_gb * 2.5)

    quality = pe.Node(afni.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality', mem_gb=mem_gb * 3)

    gcor = pe.Node(GCOR(), name='gcor', mem_gb=mem_gb * 2)
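Throughout this module, config.workflow.biggest_file_gb (derived from the largest input file) scales each node's memory estimate, as with the DVARS, outliers and quality nodes above. A hedged sketch of the same pattern on a generic nipype node; the IdentityInterface and the fallback value are assumptions for illustration:

# Sketch: scaling a nipype node's mem_gb estimate from the size of the
# largest input file, mirroring the pattern used above.
from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe

from mriqc import config
from mriqc.testing import mock_config

with mock_config():
    mem_gb = config.workflow.biggest_file_gb or 1.0  # fall back if unset
    node = pe.Node(niu.IdentityInterface(fields=['in_file']),
                   name='example', mem_gb=mem_gb * 3)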