How to use nipype.interfaces.utility.Function in nipype

To help you get started, we’ve selected a few nipype examples based on popular ways the Function interface is used in public projects.
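All of the excerpts below follow the same basic pattern: wrap a plain Python function in niu.Function, declare its inputs and outputs, and use the result as a workflow node. As a minimal, self-contained sketch of that pattern (the function and node names here are ours, not taken from any of the projects below):

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def double(x):
    # nipype runs the function in a separate namespace, so any imports
    # it needs must appear inside its own body.
    return 2 * x

doubler = pe.Node(
    niu.Function(input_names=['x'], output_names=['doubled'],
                 function=double),
    name='doubler')
doubler.inputs.x = 21

res = doubler.run()           # a Node can run standalone, outside a workflow
print(res.outputs.doubled)    # 42

Inside a workflow, the same node is wired to others with wf.connect(source, 'out_port', doubler, 'x'), as the project excerpts below show.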


github aramis-lab / clinica / clinica / pipelines / dwi_preprocessing_phase_difference_fieldmap3 / dwi_preprocessing_phase_difference_fieldmap3_utils.py (View on GitHub)
    getb0 = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='get_b0')

    pick_dws = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'],
        function=extract_bval), name='extract_dwi')
    pick_dws.inputs.b = 'diff'
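    # Fixed inputs can be set directly on a Function node; the remaining
    # inputs ('in_dwi', 'in_bval') arrive via workflow connections below.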

    flirt = dwi_flirt(flirt_param=params, excl_nodiff=True)

    mult = pe.MapNode(fsl.BinaryMaths(operation='mul'), name='ModulateDWIs',
                      iterfield=['in_file', 'operand_value'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    get_mat = pe.Node(niu.Function(
        input_names=['in_bval', 'in_xfms'], output_names=['out_files'],
        function=recompose_xfm), name='GatherMatrices')
    merge = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'in_corrected'],
        output_names=['out_file'], function=recompose_dwi), name='MergeDWIs')

    merged_volumes = pe.Node(niu.Function(
        input_names=['in_file1', 'in_file2'], output_names=['out_file'],
        function=merge_volumes_tdim), name='merge_enhanced_ref_dwis')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_xfms']), name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,  getb0,        [('in_file', 'in_file')]),
        (inputnode,  pick_dws,     [('in_file', 'in_dwi'),
                                    ('in_bval', 'in_bval')]),
github aramis-lab / clinica / clinica / workflows / dwi_preprocessing.py (View on GitHub)
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    pick_ref.inputs.index = [0]

    flirt_b0_2_T1 = pe.Node(interface=fsl.FLIRT(dof=6), name='flirt_B0_2_T1')
    flirt_b0_2_T1.inputs.interp = 'spline'
    flirt_b0_2_T1.inputs.cost = 'normmi'
    flirt_b0_2_T1.inputs.cost_func = 'normmi'

    apply_xfm = pe.Node(interface=fsl.ApplyXfm(), name='apply_xfm')
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(interface=niu.Function(
        input_names=['in_matrix', 'in_bvec'], output_names=['out_matrix_list'],
        function=expend_matrix_list), name='expend_matrix')

    rot_bvec = pe.Node(niu.Function(
        input_names=['in_matrix', 'in_bvec'], output_names=['out_file'],
        function=rotate_bvecs), name='Rotate_Bvec')

    antsRegistrationSyNQuick = pe.Node(interface=niu.Function(
        input_names=['fix_image', 'moving_image'],
        output_names=['image_warped', 'affine_matrix', 'warp',
                      'inverse_warped', 'inverse_warp'],
        function=ants_registration_syn_quick),
        name='antsRegistrationSyNQuick')
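    # A Function node can expose several outputs: each name listed in
    # output_names becomes its own output port on the node.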


    c3d_flirt2ants = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk')
    c3d_flirt2ants.inputs.itk_transform = True
    c3d_flirt2ants.inputs.fsl2ras = True

    change_transform = pe.Node(niu.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type),
            name='change_transform_type')
github poldracklab / smriprep / smriprep / workflows / surfaces.py (View on GitHub)
""".format(fs_ver=fs.Info().looseversion() or '')

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['t1w', 't2w', 'flair', 'skullstripped_t1', 'corrected_t1', 'ants_segs',
                    'subjects_dir', 'subject_id']), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['subjects_dir', 'subject_id', 't1w2fsnative_xfm',
                    'fsnative2t1w_xfm', 'surfaces', 'out_brainmask',
                    'out_aseg', 'out_aparc']),
        name='outputnode')

    recon_config = pe.Node(FSDetectInputs(hires_enabled=hires), name='recon_config')

    fov_check = pe.Node(niu.Function(function=_check_cw256), name='fov_check')
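    # With only `function=` supplied, niu.Function infers the input names
    # from _check_cw256's signature and exposes a single output named 'out'.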

    autorecon1 = pe.Node(
        ReconAll(directive='autorecon1', openmp=omp_nthreads),
        name='autorecon1', n_procs=omp_nthreads, mem_gb=5)
    autorecon1.interface._can_resume = False
    autorecon1.interface._always_run = True

    skull_strip_extern = pe.Node(FSInjectBrainExtracted(), name='skull_strip_extern')

    fsnative2t1w_xfm = pe.Node(RobustRegister(auto_sens=True, est_int_scale=True),
                               name='fsnative2t1w_xfm')
    t1w2fsnative_xfm = pe.Node(LTAConvert(out_lta=True, invert=True),
                               name='t1w2fsnative_xfm')

    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()
github aramis-lab / clinica / clinica / pipeline / t1 / t1_freesurfer.py (View on GitHub)
    from clinica.iotools.utils.data_handling import create_subs_sess_list
    import errno
    import os
    from clinica.pipeline.t1.t1_freesurfer_utils import bids_datagrabber, absolute_path

    if subjects_visits_tsv is None:
        try:
            os.makedirs(output_dir)
        except OSError as exception:
            if exception.errno != errno.EEXIST:  # re-raise unless the directory already exists
                raise
        create_subs_sess_list(input_dir, output_dir)
        subjects_visits_tsv = os.path.join(output_dir, 'subjects_sessions_list.tsv')

    # Node to get the input vars
    inputnode = pe.Node(name='inputnode',
                        interface=Function(
                            input_names=['output_dir', 'subjects_visits_tsv'],
                            output_names=['nouse0', 'nouse1', 'nouse2', 'nouse3',
                                          'subject_dir', 'subject_id',
                                          'subject_list', 'session_list'],
                            function=get_dirs_check_reconalled))
    inputnode.inputs.output_dir = output_dir
    inputnode.inputs.subjects_visits_tsv = subjects_visits_tsv


    # BIDS DataGrabber
    # ===============
    datagrabbernode = pe.Node(name='datagrabbernode',
                              interface=Function(
                                  function=bids_datagrabber,
                                  input_names=['input_dir', 'subjects_list', 'sessions_list'],
                                  output_names=['anat_t1']))
    datagrabbernode.inputs.input_dir = input_dir
github nipreps / dmriprep / preafq / run_1.py (View on GitHub)
    wf.connect(vt2, "transformed_file", convert, "in_file")
    wf.connect(convert, "out_file", datasink, "preafq.anat.@aparc_aseg")

    convert1 = convert.clone("convertorig2nii")
    wf.connect(vt3, "transformed_file", convert1, "in_file")
    wf.connect(convert1, "out_file", datasink, "preafq.anat.@anat")

    def reportNodeFunc(dwi_corrected_file, eddy_rms, eddy_report,
                       color_fa_file, anat_mask_file):
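        # The import lives inside the function body because nipype
        # serializes the function and executes it in a fresh namespace.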
        from preafq.qc import create_report_json

        report = create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
                                    color_fa_file, anat_mask_file)
        return report

    reportNode = pe.Node(niu.Function(
        input_names=['dwi_corrected_file', 'eddy_rms',
                     'eddy_report', 'color_fa_file',
                     'anat_mask_file'],
        output_names=['report'],
        function=reportNodeFunc
    ), name="reportJSON")

    wf.connect(prep, "outputnode.out_file", reportNode, 'dwi_corrected_file')
    wf.connect(prep, "fsl_eddy.out_movement_rms", reportNode, 'eddy_rms')
    wf.connect(prep, "fsl_eddy.out_outlier_report", reportNode, 'eddy_report')
    wf.connect(threshold2, "binary_file", reportNode, 'anat_mask_file')
    wf.connect(get_tensor, "color_fa_file", reportNode, 'color_fa_file')

    wf.connect(reportNode, 'report', datasink, 'preafq.report.@report')

    wf.run()
github aramis-lab / clinica / clinica / pipeline / t1 / t1_spm_workflows.py (View on GitHub)
        This allows each pair of files to be processed in parallel.
        :param native_space_images: list of lists of native space images
        :param flowfield_files: list of flow fields files
        :return: expanded list of native images,list of flow fields files of the same length
        """

        native_files = [image for nat_class in native_space_images for image in nat_class]

        if len(native_files) % len(flowfield_files) != 0:
            raise ValueError('Length of the list of native space images is not a multiple of the length of the list of flow fields images')

        ffield_files = flowfield_files * int(len(native_files)/len(flowfield_files))

        return native_files, ffield_files

    dartel2mni_input = npe.Node(niu.Function(
        input_names=['native_space_images', 'flowfield_files'],
        output_names=['native_files', 'ffield_files'],
        function=prepare_dartel2mni_input),
        name='dartel2mni_input')

    # DARTEL2MNI
    dartel2mni = npe.MapNode(spm.DARTELNorm2MNI(), name='dartel2MNI',
                             iterfield=['apply_to_files', 'flowfield_files'])

    if bounding_box is not None:
        dartel2mni.inputs.bounding_box = bounding_box
    if voxel_size is not None:
        dartel2mni.inputs.voxel_size = voxel_size

    # Modulation
    dartel2mni.inputs.modulate = modulate

    # Smoothing
github poldracklab / fitlins / fitlins / workflows / base.py (View on GitHub)
    else:
        from ..interfaces.nistats import FirstLevelModel
    l1_model = pe.MapNode(
        FirstLevelModel(),
        iterfield=['design_matrix', 'contrast_info', 'bold_file', 'mask_file'],
        mem_gb=3,
        name='l1_model')

    def _deindex(tsv):
        from pathlib import Path
        import pandas as pd
        out_tsv = str(Path.cwd() / Path(tsv).name)
        pd.read_csv(tsv, sep='\t', index_col=0).to_csv(out_tsv, sep='\t', index=False)
        return out_tsv

    deindex_tsv = pe.MapNode(niu.Function(function=_deindex),
                             iterfield=['tsv'], name='deindex_tsv')
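    # The input name 'tsv' is inferred from _deindex's signature; using it
    # as the iterfield makes the MapNode call _deindex once per TSV file.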

    # Set up common patterns
    image_pattern = 'reports/[sub-{subject}/][ses-{session}/]figures/[run-{run}/]' \
        '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \
        '[_rec-{reconstruction}][_run-{run}][_echo-{echo}]_' \
        '{suffix}.svg'
    contrast_plot_pattern = 'reports/[sub-{subject}/][ses-{session}/]figures/[run-{run}/]' \
        '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \
        '[_rec-{reconstruction}][_run-{run}][_echo-{echo}][_space-{space}]_' \
        'contrast-{contrast}_stat-{stat}_ortho.png'
    design_matrix_pattern = '[sub-{subject}/][ses-{session}/]' \
        '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \
        '[_rec-{reconstruction}][_run-{run}][_echo-{echo}]_{suffix}.tsv'
    contrast_pattern = '[sub-{subject}/][ses-{session}/]' \
        '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \
github nipy / nipype / _downloads / fmri_ants_openfmri.py (View on GitHub)
"""

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
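    # The division by 2 * TR converts a cutoff in seconds into the
    # half-period in volumes that FSL's temporal filter expects.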
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
github IBT-FMI / SAMRI / samri / preprocessing.py (View on GitHub)
	if not structural_scan_types:
		structural_scan_types = list(scan_classification[(scan_classification["categories"] == "structural")]["scan_type"])

	# define measurement directories to be processed, and populate the list either with the given include_measurements, or with an intelligent selection
	scan_types = functional_scan_types[:]
	scan_types.extend(structural_scan_types)
	data_selection = get_data_selection(measurements_base, conditions, scan_types=scan_types, subjects=subjects, exclude_subjects=exclude_subjects, measurements=measurements, exclude_measurements=exclude_measurements)
	if not subjects:
		subjects = set(list(data_selection["subject"]))
	if not conditions:
		conditions = set(list(data_selection["condition"]))

	infosource = pe.Node(interface=util.IdentityInterface(fields=['condition', 'subject']), name="infosource")
	infosource.iterables = [('condition', conditions), ('subject', subjects)]

	get_functional_scan = pe.Node(name='get_functional_scan', interface=util.Function(
		function=get_scan,
		input_names=["measurements_base", "data_selection", "condition", "subject", "scan_type"],
		output_names=['scan_path', 'scan_type']))
	get_functional_scan.inputs.data_selection = data_selection
	get_functional_scan.inputs.measurements_base = measurements_base
	get_functional_scan.iterables = ("scan_type", functional_scan_types)

	functional_bru2nii = pe.Node(interface=Bru2(), name="functional_bru2nii")
	functional_bru2nii.inputs.actual_size = actual_size

	if structural_scan_types:
		get_structural_scan = pe.Node(name='get_structural_scan', interface=util.Function(
			function=get_scan,
			input_names=["measurements_base", "data_selection", "condition", "subject", "scan_type"],
			output_names=['scan_path', 'scan_type']))
		get_structural_scan.inputs.data_selection = data_selection
		get_structural_scan.inputs.measurements_base = measurements_base
		get_structural_scan.iterables = ("scan_type", structural_scan_types)

		structural_bru2nii = pe.Node(interface=Bru2(), name="structural_bru2nii")
		structural_bru2nii.inputs.force_conversion = True
		structural_bru2nii.inputs.actual_size = actual_size
github aramis-lab / clinica / clinica / pipelines / t1_freesurfer_longitudinal / t1_freesurfer_longitudinal_correction_pipeline.py (View on GitHub)
        #         processed/non-processed and corresponding sessions and
        #         CAPS locations)
        # ======
        receivefrom_template_node_name = '0_receivefrom_template'
        receivefrom_template_node = npe.Node(
            name=receivefrom_template_node_name,
            interface=nutil.IdentityInterface(
                fields=['unpcssd_sublist',
                        'pcssd_capstargetlist',
                        'overwrite_tsv'])
            )

        # check if cross-sectional pipeline run on all subjects
        checkinput_node_name = '1_check_input'
        checkinput_node = npe.Node(name=checkinput_node_name,
                                   interface=nutil.Function(
                                       input_names=['in_caps_dir',
                                                    'in_subject_list',
                                                    'in_session_list',
                                                    'in_unpcssd_sublist',
                                                    'in_pcssd_capstargetlist',
                                                    'in_overwrite_caps',
                                                    'in_working_directory',
                                                    'in_n_procs',
                                                    'in_overwrite_tsv'],
                                       output_names=['out_subject_list',
                                                     'out_session_list',
                                                     'out_caps_target_list',
                                                     'out_overwrite_warning'],
                                       function=utils.process_input_node))
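        # Every name listed in input_names becomes a settable input on
        # the node, so fixed values can be assigned directly, as below.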
        checkinput_node.inputs.in_caps_dir = self.caps_directory
        checkinput_node.inputs.in_subject_list = self.subjects