Python engine.Workflow Class Code Examples


This article collects and summarizes typical usage examples of the Workflow class from nipype.pipeline.engine in Python. If you have been asking yourself what the Python Workflow class does, how to use it, or what real-world examples look like, the curated class code examples below should help.



Below are 18 code examples of the Workflow class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
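
Most of the excerpts below are taken from larger modules and assume imports along the lines of "from nipype.pipeline.engine import Node, Workflow" plus the relevant interface modules (nipype.interfaces.utility as util, fsl, ants, freesurfer as fs, io as nio). As a warm-up, here is a minimal, self-contained sketch of the pattern all of them follow; every name and path in it is illustrative rather than taken from the excerpts:

# Minimal illustrative sketch: a two-node pipeline built from Function
# interfaces. The base_dir path is a hypothetical scratch directory.
from nipype.pipeline.engine import Node, Workflow
import nipype.interfaces.utility as util

def double(x):
    return x * 2

a = Node(util.Function(input_names=['x'], output_names=['out'],
                       function=double), name='a')
a.inputs.x = 2
b = Node(util.Function(input_names=['x'], output_names=['out'],
                       function=double), name='b')

wf = Workflow(name='demo')
wf.base_dir = '/tmp/nipype_demo'  # hypothetical working directory
wf.connect(a, 'out', b, 'x')      # feed a.out into b.x
wf.run()                          # executes with the default Linear plugin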

Example 1: create_normalize_pipeline

def create_normalize_pipeline(name='normalize'):
    # workflow
    normalize = Workflow(name=name)
    # Define nodes
    inputnode = Node(interface=util.IdentityInterface(fields=['epi_coreg',
                                                              'tr']),
                     name='inputnode')
    outputnode = Node(interface=util.IdentityInterface(fields=[
        'normalized_file']),
        name='outputnode')

    # time-normalize scans
    normalize_time = Node(util.Function(input_names=['in_file', 'tr'],
                                        output_names=['out_file'],
                                        function=time_normalizer),
                          name='normalize_time')
    normalize_time.plugin_args = {'submit_specs': 'request_memory = 17000'}
    normalize.connect([(inputnode, normalize_time, [('tr', 'tr')]),
                       (inputnode, normalize_time, [('epi_coreg', 'in_file')]),
                       (normalize_time, outputnode, [('out_file', 'normalized_file')])
                       ])

    return normalize
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines: 25, Source: normalize.py
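
A usage sketch for this pipeline, assuming the time_normalizer function is importable from the enclosing module; all paths here are placeholders:

# Hypothetical invocation of create_normalize_pipeline.
normalize = create_normalize_pipeline()
normalize.base_dir = '/scr/working_dir'  # assumed scratch directory
normalize.inputs.inputnode.epi_coreg = '/data/sub01/epi_coreg.nii.gz'
normalize.inputs.inputnode.tr = 2.0  # repetition time in seconds
normalize.run()  # the submit_specs plugin_args only matter on a Condor cluster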


Example 2: create_reconall_pipeline

def create_reconall_pipeline(name='reconall'):
    
    reconall = Workflow(name=name)

    #inputnode 
    inputnode=Node(util.IdentityInterface(fields=['anat', 
                                                  'fs_subjects_dir',
                                                  'fs_subject_id'
                                                  ]),
                   name='inputnode')
    
    outputnode=Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                   'fs_subject_id']),
                    name='outputnode')
    
    # run reconall
    recon_all = create_skullstripped_recon_flow()
    
    
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/','_')
    
    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'inputspec.subjects_dir'),
                                              ('anat', 'inputspec.T1_files'),
                                              (('fs_subject_id', sub_id), 'inputspec.subject_id')]),
                      (recon_all, outputnode, [('outputspec.subject_id', 'fs_subject_id'),
                                               ('outputspec.subjects_dir', 'fs_subjects_dir')])
                      ])
    
    
    return reconall
Author: JanisReinelt, Project: pipelines, Lines: 32, Source: reconall_noskullstrip.py


Example 3: create_slice_timing_pipeline

def create_slice_timing_pipeline(name='slicetiming'):
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI')
    # initiate workflow
    slicetiming = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ts_slicetcorrected']),
                      name='outputnode')

    # use FSL slicetiming (default ascending bottom to top)
    timer = Node(fsl.SliceTimer(), name='timer')
    timer.inputs.time_repetition = 2.0

    slicetiming.connect([
        (inputnode, timer, [
            ('ts', 'in_file')]
         ),
        (timer, outputnode, [('slice_time_corrected_file', 'ts_slicetcorrected')]
         )
    ])

    return slicetiming
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines: 28, Source: slicetiming_correction.py
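
Note that time_repetition is hard-coded to 2.0 s inside the pipeline. A usage sketch (paths are placeholders) that overrides it through get_node:

# Hypothetical invocation of create_slice_timing_pipeline.
st = create_slice_timing_pipeline()
st.base_dir = '/scr/working_dir'  # assumed scratch directory
st.inputs.inputnode.ts = '/data/sub01/rest.nii.gz'
st.get_node('timer').inputs.time_repetition = 1.4  # override the default 2.0 s
st.run()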


Example 4: create_smoothing_pipeline

def create_smoothing_pipeline(name='smoothing'):
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI')
    # initiate workflow
    smoothing = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts_transformed',
                                                    'fwhm']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ts_smoothed']),
                      name='outputnode')

    # apply smoothing
    smooth = Node(fsl.Smooth(), name='smooth')

    smoothing.connect([
        (inputnode, smooth, [('ts_transformed', 'in_file'),
                             ('fwhm', 'fwhm')]),
        (smooth, outputnode, [('smoothed_file', 'ts_smoothed')])
    ])

    return smoothing
Author: fBeyer89, Project: LIFE_add_on_rsfMRI, Lines: 35, Source: smoothing.py


Example 5: create_dcmconvert_pipeline

def create_dcmconvert_pipeline(name='dcmconvert'):
    
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    from nipype.interfaces.dcmstack import DcmStack

    # workflow
    dcmconvert = Workflow(name=name)
    
    #inputnode 
    inputnode=Node(util.IdentityInterface(fields=['dicoms',
                                                  'filename']),
                   name='inputnode')
    
    # outputnode                                     
    outputnode=Node(util.IdentityInterface(fields=['nifti']),
                    name='outputnode')
    
    # conversion node
    converter = Node(DcmStack(embed_meta=True),
                     name='converter')
    
    # connections
    dcmconvert.connect([(inputnode, converter, [('dicoms', 'dicom_files'),
                                                ('filename','out_format')]),
                        (converter, outputnode, [('out_file','nifti')])])
    
    return dcmconvert
Author: JanisReinelt, Project: pipelines, Lines: 28, Source: dcmconvert.py
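
A usage sketch with a hypothetical DICOM directory; the filename input becomes DcmStack's out_format string:

# Hypothetical invocation of create_dcmconvert_pipeline.
from glob import glob
dcm = create_dcmconvert_pipeline()
dcm.base_dir = '/scr/working_dir'  # assumed scratch directory
dcm.inputs.inputnode.dicoms = glob('/data/sub01/dicoms/*.dcm')
dcm.inputs.inputnode.filename = 't1w'  # output name format
dcm.run()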


Example 6: create_ants_registration_pipeline

def create_ants_registration_pipeline(name='ants_registration'):
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow
    ants_registration = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['denoised_ts',
                                                    'ants_affine',
                                                    'ants_warp',
                                                    'ref']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_ts']),
                      name='outputnode')

    # also transform to mni space
    collect_transforms = Node(interface=util.Merge(2), name='collect_transforms')

    ants_reg = Node(ants.ApplyTransforms(input_image_type=3,
                                         dimension=3,
                                         interpolation='Linear'),
                    name='ants_reg')

    ants_registration.connect([
        (inputnode, ants_reg, [('denoised_ts', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, collect_transforms, [('ants_affine', 'in1')]),
        (inputnode, collect_transforms, [('ants_warp', 'in2')]),
        (collect_transforms, ants_reg, [('out', 'transforms')]),
        (ants_reg, outputnode, [('output_image', 'ants_reg_ts')])
    ])

    return ants_registration
Author: fBeyer89, Project: LIFE_add_on_rsfMRI, Lines: 35, Source: ants_registration.py


Example 7: create_reconall_pipeline

def create_reconall_pipeline(name='reconall'):
    reconall = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['anat',
                                                    'fs_subjects_dir',
                                                    'fs_subject_id'
                                                    ]),
                     name='inputnode')
    outputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir',
                                                     'fs_subject_id']),
                      name='outputnode')
    # run reconall
    recon_all = Node(fs.ReconAll(args='-autorecon2 -nuiterations 7 -no-isrunning -hippo-subfields'),
                     name="recon_all")
    # recon_all.inputs.directive= 'autorecon2-wm' # -autorecon3
    recon_all.plugin_args = {'submit_specs': 'request_memory = 9000'}
    # function to replace / in subject id string with a _
    def sub_id(sub_id):
        return sub_id.replace('/', '_')

    reconall.connect([(inputnode, recon_all, [('fs_subjects_dir', 'subjects_dir'),
                                              ('anat', 'T1_files'),
                                              (('fs_subject_id', sub_id), 'subject_id')]),
                      (recon_all, outputnode, [('subject_id', 'fs_subject_id'),
                                               ('subjects_dir', 'fs_subjects_dir')])
                      ])
    return reconall
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines: 27, Source: reconall.py
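
A usage sketch, assuming FreeSurfer is configured and using placeholder paths; the request_memory plugin_args only take effect under a cluster plugin such as Condor:

# Hypothetical invocation of create_reconall_pipeline.
reconall = create_reconall_pipeline()
reconall.base_dir = '/scr/working_dir'  # assumed scratch directory
reconall.inputs.inputnode.anat = '/data/sub01/T1.nii.gz'
reconall.inputs.inputnode.fs_subjects_dir = '/data/freesurfer_subjects'
reconall.inputs.inputnode.fs_subject_id = 'sub01'
reconall.run()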


Example 8: __init__

    def __init__(self, ct_file_name, tmp_dir, chest_regions=None,
                 vessel_seeds_mask_file_name=None):
        # note: vessel_seeds_mask_file_name is referenced below but missing
        # from the signature in the original excerpt; it is added here
        Workflow.__init__(self, 'VesselParticlesWorkflow')

        assert ct_file_name.rfind('.') != -1, "Unrecognized CT file name format"

        self._tmp_dir = tmp_dir
        # rfind('/') returns -1 when there is no directory part, so adding 1
        # yields 0 in that case (the original max(..., 0)+1 skipped a character)
        self._cid = ct_file_name[ct_file_name.rfind('/')+1:
                                 ct_file_name.rfind('.')]

        if ct_file_name.rfind('/') != -1:
            self._dir = ct_file_name[0:ct_file_name.rfind('/')]
        else:
            self._dir = '.'

        if vessel_seeds_mask_file_name is None:
            self._vessel_seeds_mask_file_name = \
              os.path.join(self._dir, self._cid + CM._vesselSeedsMask)
        else:
            self._vessel_seeds_mask_file_name = vessel_seeds_mask_file_name

        generate_partial_lung_label_map = \
          pe.Node(interface=cip.GeneratePartialLungLabelMap(),
                  name='generate_partial_lung_label_map')
        generate_partial_lung_label_map.inputs.ct = ct_file_name
        # (the remaining input assignments for this node are cut off in the
        # original excerpt)

        extract_chest_label_map = \
          pe.Node(interface=cip.ExtractChestLabelMap(),
                  name='extract_chest_label_map')
        # (the outFileName and further input assignments are cut off in the
        # original excerpt)
Author: 151706061, Project: ChestImagingPlatform, Lines: 31, Source: vessel_particles_workflow.py


Example 9: create_visualize_pipeline

def create_visualize_pipeline(name='visualize'):

    # initiate workflow
    visualize = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['ts_transformed',
                                                    'mni_template']),
                     name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['output_image']),
                      name='outputnode')

    # plot axial slices of the timeseries over the template edges
    # (the original excerpt named this node 'smooth', a copy-paste leftover)
    slicer = Node(fsl.Slicer(sample_axial=6, image_width=750), name='slicer')

    visualize.connect([
        (inputnode, slicer, [('ts_transformed', 'in_file'),
                             ('mni_template', 'image_edges')]),
        (slicer, outputnode, [('out_file', 'output_image')])
    ])

    return visualize
Author: fBeyer89, Project: LIFE_add_on_rsfMRI, Lines: 27, Source: visualize.py


Example 10: create_mgzconvert_pipeline

def create_mgzconvert_pipeline(name='mgzconvert'):
    # workflow
    mgzconvert = Workflow(name=name)
    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']), name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['anat_head',
                                                     'anat_brain',
                                                     'anat_brain_mask',
                                                     'wmseg',
                                                     'wmedge']),
                      name='outputnode')
    # import files from freesurfer
    fs_import = Node(interface=nio.FreeSurferSource(),
                     name='fs_import')
    # convert Freesurfer T1 file to nifti
    head_convert = Node(fs.MRIConvert(out_type='niigz',
                                      out_file='T1.nii.gz'),
                        name='head_convert')
    # create brainmask from aparc+aseg with single dilation
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    # create brain by converting only freesurfer output
    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='brain.nii.gz'),
                         name='brain_convert')
    brain_binarize = Node(fsl.ImageMaths(op_string='-bin -fillh', out_file='T1_brain_mask.nii.gz'), name='brain_binarize')

    # cortical and cerebellar white matter volumes to construct wm edge
    # [lh cerebral wm, lh cerebellar wm, rh cerebral wm, rh cerebellar wm, brain stem]
    wmseg = Node(fs.Binarize(out_type='nii.gz',
                             match=[2, 7, 41, 46, 16],
                             binary_file='T1_brain_wmseg.nii.gz'),
                 name='wmseg')
    # make edge from wmseg to visualize coregistration quality
    edge = Node(fsl.ApplyMask(args='-edge -bin',
                              out_file='T1_brain_wmedge.nii.gz'),
                name='edge')
    # connections
    mgzconvert.connect([(inputnode, fs_import, [('fs_subjects_dir', 'subjects_dir'),
                                                ('fs_subject_id', 'subject_id')]),
                        (fs_import, head_convert, [('T1', 'in_file')]),
                        (fs_import, wmseg, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
                        (fs_import, brain_convert, [('brainmask', 'in_file')]),
                        (wmseg, edge, [('binary_file', 'in_file'),
                                       ('binary_file', 'mask_file')]),
                        (head_convert, outputnode, [('out_file', 'anat_head')]),
                        (brain_convert, outputnode, [('out_file', 'anat_brain')]),
                        (brain_convert, brain_binarize, [('out_file', 'in_file')]),
                        (brain_binarize, outputnode, [('out_file', 'anat_brain_mask')]),
                        (wmseg, outputnode, [('binary_file', 'wmseg')]),
                        (edge, outputnode, [('out_file', 'wmedge')])
                        ])

    return mgzconvert
Author: fBeyer89, Project: LIFE_Lemon_mod_mod, Lines: 58, Source: mgzconvert.py


Example 11: __init__

    def __init__(self,name,input_fields=None,output_fields=None,**kwargs):
        Workflow.__init__(self,name=name,**kwargs)

        if input_fields:
            self.input_node = pe.Node(name = 'input',
                                      interface = util.IdentityInterface(fields=input_fields))
        if output_fields:
            self.output_node = pe.Node(name = 'output',
                                       interface = util.IdentityInterface(fields=output_fields))
Author: afloren, Project: neurometrics, Lines: 9, Source: ml.py
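
A sketch of how such a subclass might be defined and used; the class name IOWorkflow is hypothetical, and only the __init__ body mirrors the excerpt above:

# Hypothetical subclass wrapping the excerpt's __init__ logic.
from nipype.pipeline.engine import Workflow
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util

class IOWorkflow(Workflow):
    def __init__(self, name, input_fields=None, output_fields=None, **kwargs):
        Workflow.__init__(self, name=name, **kwargs)
        if input_fields:
            self.input_node = pe.Node(name='input',
                                      interface=util.IdentityInterface(fields=input_fields))
        if output_fields:
            self.output_node = pe.Node(name='output',
                                       interface=util.IdentityInterface(fields=output_fields))

wf = IOWorkflow('features', input_fields=['in_file'], output_fields=['out_file'])
wf.add_nodes([wf.input_node, wf.output_node])  # nodes still need connecting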


Example 12: ants_ct_wf

def ants_ct_wf(subjects_id,
               preprocessed_data_dir,
               working_dir,
               ds_dir,
               template_dir,
               plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces.freesurfer.utils import ImageInfo



    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='ants_ct')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 120})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')



    #####################################
    # GET DATA
    #####################################
    # GET SUBJECT SPECIFIC STRUCTURAL DATA
    in_data_templates = {
        't1w': '{subject_id}/raw_niftis/sMRI/t1w_reoriented.nii.gz',
    }

    in_data = Node(nio.SelectFiles(in_data_templates,
                                       base_directory=preprocessed_data_dir),
                       name="in_data")
    in_data.inputs.subject_id = subjects_id


    # GET NKI ANTs templates
    ants_templates_templates = {
        'brain_template': 'NKI/T_template.nii.gz',
        'brain_probability_mask': 'NKI/T_templateProbabilityMask.nii.gz',
        'segmentation_priors': 'NKI/Priors/*.nii.gz',
        't1_registration_template': 'NKI/T_template_BrainCerebellum.nii.gz'

    }

    ants_templates = Node(nio.SelectFiles(ants_templates_templates,
                                          base_directory=template_dir),
                          name="ants_templates")
    # (excerpt ends here; the workflow definition continues in the source file)
Author: fliem, Project: LeiCA, Lines: 57, Source: ants_ct.py


Example 13: create_normalize_pipeline

def create_normalize_pipeline(name='normalize'):
    
    # workflow
    normalize = Workflow(name=name)
    
    # inputnode
    inputnode=Node(util.IdentityInterface(fields=['anat',
                                                  'standard']),
                   name='inputnode')
    
    # outputnode                                 
    outputnode=Node(util.IdentityInterface(fields=['anat2std_transforms',
                                                   'anat2std',
                                                   'std2anat_transforms',
                                                   'std2anat']),
                    name='outputnode')
    
    # normalization with ants
    antsreg= Node(ants.Registration(dimension=3,
                                    transforms=['Rigid','Affine','SyN'],
                                    metric=['MI','MI','CC'],
                                    metric_weight=[1,1,1],
                                    number_of_iterations=[[1000,500,250,100],[1000,500,250,100],[100,70,50,20]],
                                    convergence_threshold=[1e-6,1e-6,1e-6],
                                    convergence_window_size=[10,10,10],
                                    shrink_factors=[[8,4,2,1],[8,4,2,1],[8,4,2,1]],
                                    smoothing_sigmas=[[3,2,1,0],[3,2,1,0],[3,2,1,0]],
                                    sigma_units=['vox','vox','vox'],
                                    initial_moving_transform_com=1,
                                    transform_parameters=[(0.1,),(0.1,),(0.1,3.0,0.0)],
                                    sampling_strategy=['Regular', 'Regular', 'None'],
                                    sampling_percentage=[0.25,0.25,1],
                                    radius_or_number_of_bins=[32,32,4],
                                    num_threads=1,
                                    interpolation='Linear',
                                    winsorize_lower_quantile=0.005,
                                    winsorize_upper_quantile=0.995,
                                    collapse_output_transforms=True,
                                    output_inverse_warped_image=True,
                                    output_warped_image=True,
                                    use_histogram_matching=True,
                                    ),
                  name='antsreg')
       
    
    # connections
    normalize.connect([(inputnode, antsreg, [('anat', 'moving_image'),
                                             ('standard', 'fixed_image')]),
                       (antsreg, outputnode, [('forward_transforms', 'anat2std_transforms'),
                                              ('reverse_transforms', 'std2anat_transforms'),
                                              ('warped_image', 'anat2std'),
                                              ('inverse_warped_image', 'std2anat')])
                        ])
     
    return normalize
Author: JanisReinelt, Project: pipelines, Lines: 55, Source: ants.py
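
A usage sketch with placeholder paths; the standard image would typically be an MNI template such as the one shipped with FSL:

# Hypothetical invocation of create_normalize_pipeline (ANTs version).
normalize = create_normalize_pipeline()
normalize.base_dir = '/scr/working_dir'  # assumed scratch directory
normalize.inputs.inputnode.anat = '/data/sub01/T1_brain.nii.gz'
normalize.inputs.inputnode.standard = '/usr/share/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz'
normalize.run()  # raising num_threads in antsreg speeds this up considerably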


Example 14: create

    def create(self):  # , **kwargs):
        """ Create the nodes and connections for the workflow """
        # Preamble
        csvReader = CSVReader()
        csvReader.inputs.in_file = self.csv_file.default_value
        csvReader.inputs.header = self.hasHeader.default_value
        csvOut = csvReader.run()

        print(("=" * 80))
        print((csvOut.outputs.__dict__))
        print(("=" * 80))

        iters = OrderedDict()
        label = list(csvOut.outputs.__dict__.keys())[0]
        result = eval("csvOut.outputs.{0}".format(label))
        iters['tests'], iters['trains'] = subsample_crossValidationSet(result, self.sample_size.default_value)
        # Main event
        out_fields = ['T1', 'T2', 'Label', 'trainindex', 'testindex']
        inputsND = Node(interface=IdentityInterface(fields=out_fields),
                        run_without_submitting=True, name='inputs')
        inputsND.iterables = [('trainindex', iters['trains']),
                              ('testindex', iters['tests'])]
        if not self.hasHeader.default_value:
            inputsND.inputs.T1 = csvOut.outputs.column_0
            inputsND.inputs.Label = csvOut.outputs.column_1
            inputsND.inputs.T2 = csvOut.outputs.column_2
        else:
            inputsND.inputs.T1 = csvOut.outputs.__dict__['t1']
            inputsND.inputs.Label = csvOut.outputs.__dict__['label']
            inputsND.inputs.T2 = csvOut.outputs.__dict__['t2']
            pass  # TODO
        metaflow = Workflow(name='metaflow')
        metaflow.config['execution'] = {
            'plugin': 'Linear',
            'stop_on_first_crash': 'false',
            # stop_on_first_rerun stops at the first attempt to rerun, before
            # running and before deleting previous results
            'stop_on_first_rerun': 'false',
            'hash_method': 'timestamp',
            'single_thread_matlab': 'true',  # multi-core 2011a for matrix multiplication
            'remove_unnecessary_outputs': 'true',
            'use_relative_paths': 'false',  # relative paths should be on; requires hash update when changed
            'remove_node_directories': 'false',  # experimental
            'local_hash_check': 'false'
        }

        metaflow.add_nodes([inputsND])
        """import pdb; pdb.set_trace()"""
        fusionflow = FusionLabelWorkflow()
        self.connect(
            [(metaflow, fusionflow, [('inputs.trainindex', 'trainT1s.index'), ('inputs.T1', 'trainT1s.inlist')]),
             (metaflow, fusionflow,
              [('inputs.trainindex', 'trainLabels.index'), ('inputs.Label', 'trainLabels.inlist')]),
             (metaflow, fusionflow, [('inputs.testindex', 'testT1s.index'), ('inputs.T1', 'testT1s.inlist')])
             ])
Author: NIRALUser, Project: BRAINSTools, Lines: 54, Source: crossValidate.py


Example 15: create_mp2rage_pipeline

def create_mp2rage_pipeline(name='mp2rage'):
    
    # workflow
    mp2rage = Workflow(name=name)
    
    # inputnode 
    inputnode = Node(util.IdentityInterface(fields=['inv2',
                                                    'uni',
                                                    't1map']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['uni_masked',
                                                     'background_mask',
                                                     'uni_stripped',
                                                     #'skullstrip_mask',
                                                     #'uni_reoriented'
                                                     ]),
                      name='outputnode')
    
    # remove background noise
    background = Node(JistIntensityMp2rageMasking(outMasked=True,
                                            outMasked2=True,
                                            outSignal2=True), 
                      name='background')
    
    # skullstrip
    strip = Node(MedicAlgorithmSPECTRE2010(outStripped=True,
                                           outMask=True,
                                           outOriginal=True,
                                           inOutput='true',
                                           inFind='true',
                                           inMMC=4
                                           ), 
                 name='strip')
    
    # connections
    mp2rage.connect([(inputnode, background, [('inv2', 'inSecond'),
                                              ('t1map', 'inQuantitative'),
                                              ('uni', 'inT1weighted')]),
                     (background, strip, [('outMasked2','inInput')]),
                     (background, outputnode, [('outMasked2','uni_masked'),
                                               ('outSignal2','background_mask')]),
                    (strip, outputnode, [('outStripped','uni_stripped'),
                                         #('outMask', 'skullstrip_mask'),
                                         #('outOriginal','uni_reoriented')
                                         ])
                     ])
    
    
    return mp2rage
Author: JanisReinelt, Project: pipelines, Lines: 51, Source: mp2rage_cbstools.py


Example 16: create_brainextract_pipeline

def create_brainextract_pipeline(name='brainextract'):
    # workflow
    brainextract = Workflow(name=name)
    #inputnode
    inputnode=Node(util.IdentityInterface(fields=['anat', 'fraction']),
                   name='inputnode')
    #outputnode
    outputnode=Node(util.IdentityInterface(fields=['anat_brain', 'anat_brain_mask']),
                    name='outputnode')
    #use bet brain extraction
    bet = Node(interface=fsl.BET(mask=True),
               name = 'bet')
  
    # connections
    brainextract.connect([(inputnode, bet, [('anat', 'in_file'),
                                            ('fraction', 'frac')]),
                          (bet, outputnode, [('out_file', 'anat_brain')]),
                          (bet, outputnode, [('mask_file', 'anat_brain_mask')])
                          ])
    
    return brainextract
Author: fBeyer89, Project: LIFE_rs_ICA_preprocessing, Lines: 21, Source: brainextract.py


Example 17: smooth_data

def smooth_data(name = 'func_smoothed'):
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    flow        = Workflow(name)

    inputnode   = Node(util.IdentityInterface(fields=['func_data']),
                       name = 'inputnode')

    outputnode  =  Node(util.IdentityInterface(fields=['func_smoothed']),
                       name = 'outputnode')

    smooth      = Node(interface=fsl.Smooth(), name='func_smooth_fwhm_4')
    smooth.inputs.fwhm                 = 4.0
    smooth.inputs.terminal_output      = 'file'

    flow.connect(inputnode, 'func_data'      , smooth      , 'in_file'    )
    flow.connect(smooth,    'smoothed_file'  , outputnode  , 'func_smoothed'   )


    return flow
Author: amadeuskanaan, Project: GluREST, Lines: 22, Source: smooth.py
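
A usage sketch with a placeholder input path:

# Hypothetical invocation of smooth_data.
sm = smooth_data(name='func_smoothed')
sm.base_dir = '/scr/working_dir'  # assumed scratch directory
sm.inputs.inputnode.func_data = '/data/sub01/rest_preprocessed.nii.gz'
sm.run()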


Example 18: func_preprocess

def func_preprocess(name = 'func_preproc'):

    '''
    Method to preprocess functional data after warping to anatomical space.

    Assumes one-step distortion correction, motion correction and
    boundary-based linear registration to anatomical space have already
    been performed.

    Procedure:
    # 1- skull strip
    # 2- normalize the image intensity values
    # 3- calculate the mean of the skull-stripped image
    # 4- create a brain mask from the normalized data
    '''

    # Define Workflow
    flow        = Workflow(name=name)
    inputnode   = Node(util.IdentityInterface(fields=['func_in']),
                           name='inputnode')
    outputnode  = Node(util.IdentityInterface(fields=['func_preproc',
                                                      'func_preproc_mean',
                                                      'func_preproc_mask']),
                           name = 'outputnode')


    # 2- Normalize the image intensity values.
    norm                               = Node(interface = fsl.ImageMaths(),       name = 'func_normalized')
    norm.inputs.op_string              = '-ing 1000'
    norm.inputs.out_data_type          = 'float'
    norm.inputs.output_type            = 'NIFTI'

    # 4- Create brain mask from Normalized data.
    mask                               = Node(interface = fsl.BET(),  name = 'func_preprocessed')
    mask.inputs.functional             = True
    mask.inputs.mask                   = True
    mask.inputs.frac                   = 0.5
    mask.inputs.vertical_gradient      = 0
    mask.inputs.threshold              = True

    # 3- Calculate Mean of Skull stripped image
    mean                          = Node(interface = preprocess.TStat(),     name = 'func_preprocessed_mean')
    mean.inputs.options           = '-mean'
    mean.inputs.outputtype        = 'NIFTI'


    flow.connect( inputnode  ,   'func_in'           ,   norm,        'in_file'     )
    flow.connect( norm       ,   'out_file'          ,   mask,        'in_file'     )
    flow.connect( norm       ,   'out_file'          ,   mean,        'in_file'     )
    flow.connect( mask       ,   'out_file'          ,   outputnode,  'func_preproc')
    flow.connect( mask       ,   'mask_file'         ,   outputnode,  'func_preproc_mask')
    flow.connect( mean       ,   'out_file'          ,   outputnode,  'func_preproc_mean')

    return flow
Author: amadeuskanaan, Project: GluREST, Lines: 53, Source: func_preprocess.py
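
A usage sketch with a placeholder input path; FSL and AFNI must both be available, since the pipeline mixes fsl and afni (preprocess.TStat) interfaces:

# Hypothetical invocation of func_preprocess.
preproc = func_preprocess()
preproc.base_dir = '/scr/working_dir'  # assumed scratch directory
preproc.inputs.inputnode.func_in = '/data/sub01/rest2anat.nii.gz'
preproc.run()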



Note: the nipype.pipeline.engine.Workflow class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other source-code and documentation platforms. The snippets are selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. Consult the license of the corresponding project before redistributing or using the code; do not repost without permission.

