How to use the sacred.observers.MongoObserver class in sacred

To help you get started, we’ve selected a few sacred examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github kata-ai / indosum / utils.py View on Github external
def setup_mongo_observer(ex):
    """Attach a MongoObserver to the experiment *ex* when configured.

    Reads the connection URL and database name from the SACRED_MONGO_URL
    and SACRED_DB_NAME environment variables; if either is missing, the
    experiment is left untouched.
    """
    url = os.getenv('SACRED_MONGO_URL')
    database = os.getenv('SACRED_DB_NAME')
    # Guard clause: only attach the observer when both settings are present.
    if url is None or database is None:
        return
    ex.observers.append(MongoObserver.create(url=url, db_name=database))
github arthurmensch / modl / examples / old / task_predict_from_nii.py View on Github external
from modl.datasets.hcp import fetch_hcp, contrasts_description
from modl.utils.system import get_cache_dirs
from modl.decomposition.fmri import compute_loadings

import matplotlib.pyplot as plt

import sys
from os import path

sys.path.append(path.dirname(path.dirname
                             (path.dirname(path.abspath(__file__)))))

# Ingredient holding the task-data configuration, shared with the experiment.
task_data_ing = Ingredient('task_data')
prediction_ex = Experiment('task_predict_from_nii', ingredients=[task_data_ing])

# Record runs in MongoDB (database 'amensch', collection 'runs') ...
observer = MongoObserver.create(db_name='amensch', collection='runs')
prediction_ex.observers.append(observer)

# ... and also on disk under ~/output/runs.
observer = FileStorageObserver.create(expanduser('~/output/runs'))
prediction_ex.observers.append(observer)


@prediction_ex.config
def config():
    # Sacred config scope: every local assignment below becomes a
    # configuration entry of the experiment, keyed by the variable name.
    standardize = True
    # Log-spaced grid of 15 values in [0.1, 100] -- presumably sklearn-style
    # inverse regularization strengths; confirm against the estimator used.
    C = np.logspace(-1, 2, 15)
    n_jobs = 20
    verbose = 10
    seed = 2
    max_iter = 10000
    tol = 1e-7
    transform_batch_size = 300
github arthurmensch / modl / examples / raw / unmask_rest.py View on Github external
from sacred import Experiment
from sacred.observers import MongoObserver

from modl.input_data.fmri.monkey import monkey_patch_nifti_image

monkey_patch_nifti_image()

from sklearn.externals.joblib import Memory

from modl.datasets import fetch_adhd
from modl.datasets import fetch_hcp, get_data_dirs
from modl.input_data.fmri.unmask import create_raw_rest_data, get_raw_rest_data
from modl.utils.system import get_cache_dirs

unmask_rest = Experiment('unmask_rest')
# Log all runs of this experiment to MongoDB (db 'amensch', collection 'runs').
observer = MongoObserver.create(db_name='amensch', collection='runs')
unmask_rest.observers.append(observer)


@unmask_rest.config
def config():
    # Default config scope: each local assignment becomes a config entry.
    source = 'adhd'
    smoothing_fwhm = 6  # presumably FWHM in mm (nilearn convention) -- confirm
    n_jobs = 3


@unmask_rest.named_config
def hcp():
    # Named config: activated explicitly (e.g. `with hcp` on the CLI) to
    # switch the experiment to the HCP dataset with heavier parallelism.
    source = 'hcp'
    smoothing_fwhm = 4
    n_jobs = 36
github arthurmensch / modl / examples / contrast / predict_contrast_multi_dataset.py View on Github external
import pandas as pd
from modl.classification import make_loadings_extractor
from modl.datasets import get_data_dirs
from modl.input_data.fmri.unmask import get_raw_contrast_data
from modl.utils.system import get_cache_dirs
from sacred import Experiment
from sacred.observers import MongoObserver
from sklearn.externals.joblib import Memory
from sklearn.externals.joblib import dump
from sklearn.linear_model import LogisticRegressionCV
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import LabelEncoder

predict_contrast = Experiment('predict_contrast')
# Log all runs of this experiment to MongoDB (db 'amensch', collection 'runs').
observer = MongoObserver.create(db_name='amensch', collection='runs')
predict_contrast.observers.append(observer)

@predict_contrast.config
def config():
    # Sacred config scope: each local assignment becomes a config entry.
    alphas = np.logspace(-3, 3, 7).tolist()  # candidate regularization grid
    standardize = True
    scale_importance = 'sqrt'
    n_jobs = 30
    verbose = 2
    seed = 2
    max_iter = 200
    tol = 1e-7
    alpha = 1e-4
    multi_class = 'multinomial'
    fit_intercept = True
    identity = False
github arthurmensch / cogspaces / examples / multiple / predict_contrast_train_size.py View on Github external
from sacred import Experiment
from sacred.observers import MongoObserver
from sacred.optional import pymongo
from sklearn.externals.joblib import Parallel
from sklearn.externals.joblib import delayed
from sklearn.utils import check_random_state, shuffle

# Make the repository root importable so the `examples.*` import below works.
sys.path.append(path.dirname(path.dirname
                             (path.dirname(path.abspath(__file__)))))

from examples.predict_contrast import predict_contrast_exp

# Parent experiment wrapping the single-run experiment as an ingredient.
predict_contrast_multi_exp = Experiment('predict_contrast_train_size',
                                        ingredients=[predict_contrast_exp])
# Reuse the experiment path as the MongoDB collection name.
collection = predict_contrast_multi_exp.path
observer = MongoObserver.create(db_name='amensch', collection=collection)
predict_contrast_multi_exp.observers.append(observer)


@predict_contrast_multi_exp.config
def config():
    # Parent-experiment config: parallelism and number of seeded repeats.
    n_jobs = 24
    n_seeds = 10
    seed = 2


def single_run(config_updates, _id, master_id):
    observer = MongoObserver.create(db_name='amensch', collection=collection)
    predict_contrast_exp.observers = [observer]

    @predict_contrast_exp.config
    def config():
github arthurmensch / modl / examples / contrast / multi_predict_contrast_factored_no_projection.py View on Github external
def single_run(config_updates, _id, master_id):
    # Prepare one child run of `predict_contrast` with the given config
    # overrides, logged to MongoDB.
    # NOTE(review): `collection` is a module-level name defined outside this
    # snippet -- verify it is in scope where this function is called.
    observer = MongoObserver.create(db_name='amensch',
                                    collection=collection)

    # Extra config scope layered onto the experiment for this run; sacred
    # turns each local assignment into a configuration entry.
    @predict_contrast.config
    def config():
        n_jobs = 1
        from_loadings = True
        projection = False
        factored = True
        loadings_dir = join(get_data_dirs()[0], 'pipeline', 'contrast',
                            'reduced')
        verbose = 2
        max_iter = 50

    # Replace (not append) the observer list so each child run logs exactly once.
    predict_contrast.observers = [observer]

    # `_create_run` is a private sacred API -- presumably it builds the Run
    # object without starting it; the snippet appears truncated here.
    run = predict_contrast._create_run(config_updates=config_updates)
github pinae / Superresolution / train.py View on Github external
# -*- coding: utf-8 -*-
from network import Network
from PIL import Image
from scale import size, load_batches
import numpy as np
import os
import sys
from sacred import Experiment
from sacred.observers import MongoObserver
from sacred.utils import apply_backspaces_and_linefeeds
from gpu_helpers import init_all_gpu
init_all_gpu()

ex = Experiment('Superresolution', ingredients=[])
# MongoObserver() with no arguments uses sacred's default connection settings.
ex.observers.append(MongoObserver())
# Collapse backspace/carriage-return progress output in the captured stdout.
ex.captured_out_filter = apply_backspaces_and_linefeeds


@ex.config
def my_config():
    # Sacred config scope: each local assignment becomes a config entry.
    image_size = (320, 240)  # presumably (width, height) -- confirm in scale.size
    batch_size = 5
    no_epochs = 500
    lr = 0.0001
    lr_stair_width = 10  # assumed: epochs per staircase LR step -- confirm
    lr_decay = 0.95


@ex.capture
def log_training_performance(_run, loss, lr):
    # Record the training loss on the sacred run; `_run` is injected by
    # sacred through @ex.capture.
    # NOTE(review): `lr` is unused in the visible body -- the snippet may
    # be truncated.
    _run.log_scalar("loss", float(loss))
github arthurmensch / cogspaces / examples / multiple / predict_contrast_multinomial.py View on Github external
from sacred import Experiment
from sacred.observers import MongoObserver
from sacred.optional import pymongo
from sklearn.externals.joblib import Parallel
from sklearn.externals.joblib import delayed
from sklearn.utils import check_random_state, shuffle

# Make the repository root importable so the `examples.*` import below works.
sys.path.append(path.dirname(path.dirname
                             (path.dirname(path.abspath(__file__)))))

from examples.predict_contrast import predict_contrast_exp

# Parent experiment wrapping the single-run experiment as an ingredient.
predict_contrast_multi_exp = Experiment('predict_contrast_multinomial',
                                        ingredients=[predict_contrast_exp])
# Reuse the experiment path as the MongoDB collection name.
collection = predict_contrast_multi_exp.path
observer = MongoObserver.create(db_name='amensch', collection=collection)
predict_contrast_multi_exp.observers.append(observer)


@predict_contrast_multi_exp.config
def config():
    # Parent-experiment config: parallelism and number of seeded repeats.
    n_jobs = 10
    n_seeds = 10
    seed = 2


def single_run(config_updates, _id, master_id):
    observer = MongoObserver.create(db_name='amensch', collection=collection)
    predict_contrast_exp.observers = [observer]

    @predict_contrast_exp.config
    def config():
github arthurmensch / modl / examples / contrast / predict_contrast_multi.py View on Github external
def single_run(config_updates, _id, master_id):
    # Prepare one child run of `predict_contrast_exp` with the given config
    # overrides, logged to MongoDB.
    # NOTE(review): `collection` is a module-level name defined outside this
    # snippet -- verify it is in scope; the function body also appears
    # truncated after the nested config below.
    observer = MongoObserver.create(db_name='amensch', collection=collection)
    # Replace (not append) the observer list so each child run logs exactly once.
    predict_contrast_exp.observers = [observer]

    # Extra config scope for this run; sacred turns each local assignment
    # into a configuration entry.
    @predict_contrast_exp.config
    def config():
        n_jobs = 1
        epochs = 100
        steps_per_epoch = 300
        dropout_input = 0.25
        dropout_latent = 0.5
        source = 'hcp_rs_concat'
        depth_prob = [0, 1., 0]
        shared_supervised = False
        batch_size = 256
        alpha = 1e-5
        validation = False
        mix_batch = False
github arthurmensch / modl / examples / unmask / unmask_contrast_hcp.py View on Github external
from nilearn.datasets import load_mni152_brain_mask
from sacred import Experiment
from sacred.observers import MongoObserver
from sklearn.externals.joblib import Memory

from modl.input_data.fmri.monkey import monkey_patch_nifti_image

monkey_patch_nifti_image()

from modl.datasets import get_data_dirs
from modl.datasets.hcp import fetch_hcp, INTERESTING_CONTRASTS_EXTENDED
from modl.input_data.fmri.unmask import create_raw_contrast_data

unmask_task = Experiment('unmask_contrast_hcp')
# Log all runs of this experiment to MongoDB (db 'amensch', collection 'runs').
observer = MongoObserver.create(db_name='amensch', collection='runs')
unmask_task.observers.append(observer)


@unmask_task.config
def config():
    n_jobs = 30
    batch_size = 1200


@unmask_task.automain
def run(n_jobs, batch_size, _run):
    dataset = fetch_hcp()
    imgs = dataset.contrasts
    interesting_con = INTERESTING_CONTRASTS_EXTENDED
    imgs = imgs.loc[(slice(None), slice(None), interesting_con), :]
    mask = fetch_hcp().mask