How to use the nimare.dataset.Dataset class in NiMARE

To help you get started, we’ve selected a few NiMARE examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github neurostuff / NiMARE / nimare / extract / extract.py View on Github external
Email address to use to call the PubMed API

    Returns
    -------
    dataset : :obj:`nimare.dataset.Dataset`
    """
    try:
        # Biopython is an optional dependency; import lazily so the rest of
        # the package can be used without it installed.
        from Bio import Entrez, Medline
    except:  # NOTE(review): bare except — should be `except ImportError:` so that unrelated errors are not masked
        raise Exception(
            'Module biopython is required for downloading abstracts from '
            'PubMed.')

    # NCBI requires a contact email on every Entrez request.
    Entrez.email = email

    if isinstance(dataset, Dataset):
        # Coordinate IDs appear to use a '<pmid>-<contrast>' convention;
        # keep only the PMID portion. TODO confirm against Dataset docs.
        pmids = dataset.coordinates['id'].astype(str).tolist()
        pmids = [pmid.split('-')[0] for pmid in pmids]
        # Deduplicate and sort for a stable query order.
        pmids = sorted(list(set(pmids)))
    elif isinstance(dataset, list):
        # A plain list is treated as PMIDs directly; coerce to str for Entrez.
        pmids = [str(pmid) for pmid in dataset]
    else:
        raise Exception(
            'Dataset type not recognized: {0}'.format(type(dataset)))

    records = []
    # PubMed only allows you to search ~1000 at a time. I chose 900 to be safe.
    chunks = [pmids[x: x + 900] for x in range(0, len(pmids), 900)]
    for i, chunk in enumerate(chunks):
        LGR.info('Downloading chunk {0} of {1}'.format(i + 1, len(chunks)))
        # Fetch MEDLINE-format records for this chunk of PMIDs.
        h = Entrez.efetch(db='pubmed', id=chunk, rettype='medline',
                          retmode='text')
github neurostuff / NiMARE / nimare / workflows / scale.py View on Github external
def scale_workflow(dataset_file, baseline, output_dir=None, prefix=None,
                   n_iters=2500, v_thr=0.001):
    """
    Perform SCALE meta-analysis from Sleuth text file or NiMARE json file.

    Warnings
    --------
    This method is not yet implemented.
    """
    # Dispatch on file extension; both paths load into MNI152 2mm space.
    if dataset_file.endswith('.json'):
        dset = Dataset(dataset_file, target='mni152_2mm')
    # NOTE(review): two independent `if`s rather than if/elif/else — a file
    # matching neither suffix leaves `dset` undefined (NameError downstream).
    if dataset_file.endswith('.txt'):
        dset = convert_sleuth_to_dataset(dataset_file, target='mni152_2mm')

    boilerplate = """
A specific coactivation likelihood estimation (SCALE; Langner et al., 2014)
meta-analysis was performed using NiMARE. The input dataset included {n}
studies/experiments.

Voxel-specific null distributions were generated using base rates from {bl}
with {n_iters} iterations. Results were thresholded at p < {thr}.

References
----------
- Langner, R., Rottschy, C., Laird, A. R., Fox, P. T., & Eickhoff, S. B. (2014).
Meta-analytic connectivity modeling revisited: controlling for activation base
rates. NeuroImage, 99, 559-570.
github neurostuff / NiMARE / nimare / io.py View on Github external
-------
    :obj:`nimare.dataset.Dataset`
        Dataset object containing experiment information from text_file.
    """
    # Accept either a single Sleuth filename or a list of filenames.
    if isinstance(text_file, str):
        text_files = [text_file]
    elif isinstance(text_file, list):
        text_files = text_file
    else:
        raise ValueError('Unsupported type for parameter "text_file": '
                         '{0}'.format(type(text_file)))
    # Merge each file's dict into one; later files overwrite duplicate keys.
    dict_ = {}
    for text_file in text_files:  # NOTE(review): loop var shadows the parameter
        temp_dict = convert_sleuth_to_dict(text_file)
        dict_ = {**dict_, **temp_dict}
    return Dataset(dict_, target=target)
github neurostuff / NiMARE / nimare / workflows / macm.py View on Github external
def macm_workflow(dataset_file, mask_file, output_dir=None, prefix=None,
                  n_iters=10000, v_thr=0.001, n_cores=-1):
    """
    Perform MACM with ALE algorithm.
    """
    LGR.info('Loading coordinates...')
    dset = Dataset(dataset_file)
    # Select the studies whose coordinates fall within the ROI mask.
    sel_ids = dset.get_studies_by_mask(mask_file)

    # override sample size
    # Participant totals for the full database and for the selection, counting
    # each study ('id') once. The float->int double cast suggests 'n' is stored
    # as a string — TODO confirm against the Dataset coordinates schema.
    n_subs_db = dset.coordinates.drop_duplicates('id')['n'].astype(float).astype(int).sum()
    sel_coords = dset.coordinates.loc[dset.coordinates['id'].isin(sel_ids)]
    n_subs_sel = sel_coords.drop_duplicates('id')['n'].astype(float).astype(int).sum()
    LGR.info('{0} studies selected out of {1}.'.format(len(sel_ids),
                                                       len(dset.ids)))
    boilerplate = """
Meta-analytic connectivity modeling (MACM; Laird et al., 2009; Robinson et al.,
2009; Eickhoff et al., 2010) analysis was performed with the activation
likelihood estimation (ALE; Turkeltaub, Eden, Jones, & Zeffiro, 2002; Eickhoff,
Bzdok, Laird, Kurth, & Fox, 2012; Turkeltaub et al., 2012) meta-analysis
algorithm using NiMARE. The input dataset included {n_foci_db}
foci from {n_subs_db} participants across {n_exps_db} studies/experiments, from