How to use sacred.optional.pymongo in sacred

To help you get started, we've selected a few sacred examples showing how sacred.optional.pymongo is used in popular public projects.

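In the sacred releases these projects use, sacred/optional.py wraps third-party imports with an optional_import helper, so "from sacred.optional import pymongo" yields the regular pymongo module when the driver is installed (newer sacred releases may organise this differently). Every snippet below uses it the same way: open a MongoClient, read the run documents that sacred's MongoObserver has already stored, and number a new batch of runs after them. A minimal sketch of the import under those assumptions; the MongoDB URL is a placeholder, the database name is the one the snippets use:

from sacred.optional import pymongo  # the real pymongo module when the driver is installed

if pymongo is not None:  # assumed guard; sacred is assumed to leave this None otherwise
    client = pymongo.MongoClient('mongodb://localhost:27017')  # placeholder URL
    database = client['amensch']  # database name used throughout the snippets below
    print(client.server_info()['version'])  # quick check that the server is reachable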

github arthurmensch / cogspaces / examples / multiple / predict_contrast_parameter.py View on Github
latent_dim_list,
        batch_size_list,
        n_seeds, n_jobs, _run, _seed):
    seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
                                                  size=n_seeds)
    param_grid = ParameterGrid(
        {'datasets': [['archi', 'hcp']],
         'dropout_latent': dropout_latent_list,
         'dropout_input': dropout_input_list,
         'batch_size': batch_size_list,
         'latent_dim': latent_dim_list,
         # Hack to iterate over seed first
         'aseed': seed_list})

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(param_grid))
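
The "# Robust labelling of experiments" block that recurs in every snippet reads the highest _id the MongoObserver has stored and numbers the new batch of runs just above it. Cursor.count() was removed in pymongo 4, so on current drivers the same step is more simply written with find_one and a descending sort. This is a hedged rewrite, not the projects' code; the collection name follows the snippets that use database.runs (elsewhere it comes from a collection config value):

import pymongo

client = pymongo.MongoClient()
database = client['amensch']

# Highest existing run _id, if any; new runs are numbered from next_id upwards.
last_run = database['runs'].find_one({}, {'_id': 1},
                                     sort=[('_id', pymongo.DESCENDING)])
next_id = last_run['_id'] + 1 if last_run is not None else 1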
github arthurmensch / cogspaces / examples / multiple / predict_contrast_multi.py View on Github
'human_voice': None}
        transfer = [{'datasets': ['archi', 'hcp', 'brainomics', 'camcan'],
                     'geometric_reduction': True,
                     'latent_dim': 50,
                     'dropout_input': 0.25,
                     'dropout_latent': 0.5,
                     'train_size': train_size,
                     'optimizer': 'adam',
                     'seed': seed} for seed in seed_list]
        # exps += multinomial
        # exps += geometric_reduction
        # exps += latent_dropout
        exps += transfer

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1
    exps = shuffle(exps)


    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(exps))
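
shuffle(exps) is presumably sklearn.utils.shuffle (the import sits outside the snippet): it returns a randomly permuted copy of the experiment list so configurations reach the workers in random order. A small sketch; the random_state argument is an addition for reproducibility, not something the snippet passes:

from sklearn.utils import shuffle

exps = [{'seed': seed} for seed in range(4)]
exps = shuffle(exps, random_state=0)  # returns a permuted copy; the original list is untouched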
github arthurmensch / cogspaces / examples / multiple / predict_contrast_multinomial.py View on Github
'geometric_reduction': True,
                         'latent_dim': 50,
                         'dropout_input': 0.25,
                         'dropout_latent': 0.5,
                         'optimizer': 'adam',
                         'seed': seed} for seed in seed_list]
            exps += multinomial
            # exps += geometric_reduction
            # exps += latent_dropout
            # exps += transfer

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1
    exps = shuffle(exps)
    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(exps))
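
The _run and _seed arguments in these captured functions are special values that sacred injects automatically: _run._id identifies the parent run (forwarded to single_run above), and _seed is the reproducible seed from which the per-experiment seed list is derived. A minimal standalone sketch; the experiment name is a placeholder:

from sacred import Experiment

ex = Experiment('parent_experiment')  # placeholder name

@ex.automain
def main(_run, _seed):
    # Without an observer _run._id stays None; with a MongoObserver it is the
    # integer _id of this run's document in the runs collection.
    print('parent run id:', _run._id)
    print('sacred-provided seed:', _seed)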
github arthurmensch / modl / examples / contrast / old / multi_predict_contrast_hierarchical.py View on Github
param_grid = ParameterGrid(
        {'datasets': [['la5c', 'hcp']],
         'dataset_weight': [dict(hcp=i, la5c=1 - i)
                            for i in [0, 0.25, 0.5, 0.75]],
         'shared_supervised': shared_supervised_list,
         'task_prob': task_prob_list,
         'dropout_latent': dropout_latent_list,
         'latent_dim': latent_dim_list,
         # Hack to iterate over seed first
         'aseed': seed_list})

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(param_grid))
github arthurmensch / modl / examples / components / multi_decompose_rest.py View on Github
def run(n_components_list, alpha_list, n_jobs):
    update_list = []
    for n_components in n_components_list:
        for alpha in alpha_list:
            config_updates = {'n_components': n_components,
                               'alpha': alpha}
            update_list.append(config_updates)

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database.runs.find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i)
                            for i, config_updates in enumerate(update_list))
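
All of these scripts fan the configurations out with joblib: one delayed(single_run)(...) call per config_updates dict, each carrying a precomputed run id. single_run itself is defined outside the snippets; the stand-in below is hypothetical and only illustrates the call pattern, with sklearn.model_selection.ParameterGrid replacing the nested loops above:

from joblib import Parallel, delayed
from sklearn.model_selection import ParameterGrid

def single_run(config_updates, run_id):
    # Hypothetical stand-in: in the projects this re-runs a sacred Experiment
    # with the given config_updates and records it under the given run id.
    print(run_id, config_updates)

update_list = list(ParameterGrid({'n_components': [16, 64, 256],
                                  'alpha': [1e-3, 1e-4]}))

next_id = 1  # first free _id, as computed by the labelling block
Parallel(n_jobs=2, verbose=10)(
    delayed(single_run)(config_updates, next_id + i)
    for i, config_updates in enumerate(update_list))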
github arthurmensch / cogspaces / examples / multiple / predict_contrast_legacy_multi.py View on Github
'dropout_input': 0.25,
                           'dropout_latent': 0.5,
                           'seed': seed} for seed in seed_list]
        transfer = [{'datasets': [dataset, 'hcp'],
                     'geometric_reduction': True,
                     'latent_dim': 50,
                     'dropout_input': 0.25,
                     'dropout_latent': 0.5,
                     'seed': seed} for seed in seed_list]
        # exps += multinomial
        exps += geometric_reduction
        exps += latent_dropout
        exps += transfer

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(exps))
github arthurmensch / modl / examples / contrast / predict_contrast_train_size.py View on Github
['camcan', 'hcp']
                    ]
    transfer_camcan = [{'datasets': dataset,
                        'geometric_reduction': True,
                        'latent_dim': 50,
                        'dropout_input': 0.25,
                        'dropout_latent': 0.5,
                        'train_size': train_size,
                        'optimizer': 'adam',
                        'seed': seed} for seed in seed_list
                       for train_size in train_sizes
                       for dataset in datasets_list]
    exps += transfer_camcan

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1
    exps = shuffle(exps)

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(exps))
github arthurmensch / modl / examples / contrast / old / predict_contrast_hierarchical_dataset.py View on Github
seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
                                                  size=n_seeds)
    param_grid = ParameterGrid(
        {'datasets': [['archi', 'hcp', 'brainomics', 'la5c']],
         'shared_supervised': shared_supervised_list,
         'task_prob': task_prob_list,
         'dropout_latent': dropout_latent_list,
         'latent_dim': latent_dim_list,
         # Hack to iterate over seed first
         'aseed': seed_list})

    # Robust labelling of experiments
    client = pymongo.MongoClient()
    database = client['amensch']
    c = database[collection].find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs,
             verbose=10)(delayed(single_run)(config_updates, c + i, _run._id)
                         for i, config_updates in enumerate(param_grid))
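
Several snippets build their seed_list the same way: the single _seed that sacred injects is expanded into n_seeds independent seeds below the uint32 bound via sklearn.utils.check_random_state. A standalone sketch; the explicit dtype is an addition so the uint32 upper bound is accepted on every platform:

import numpy as np
from sklearn.utils import check_random_state

n_seeds = 5
_seed = 42  # sacred injects this automatically inside a captured function
seed_list = check_random_state(_seed).randint(np.iinfo(np.uint32).max,
                                              size=n_seeds, dtype=np.int64)
print(seed_list)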
github arthurmensch / modl / examples / contrast / task / compare_task_predict.py View on Github
update_list = []
    for train_size in train_size_list:
        for n_components in n_components_list:
            for alpha in alpha_list:
                config_updates = {'task_data': {'train_size': 778},
                                  'rest_data': {'train_size': train_size},
                                  'components':
                                      {'n_components': n_components,
                                       'alpha': alpha},
                                  }
                update_list.append(config_updates)

    client = pymongo.MongoClient()
    database = client['amensch']
    c = database.runs.find({}, {'_id': 1})
    c = c.sort('_id', pymongo.DESCENDING).limit(1)
    c = c.next()['_id'] + 1 if c.count() else 1

    Parallel(n_jobs=n_jobs)(delayed(single_run)(config_updates, c + i)
                            for i, config_updates in enumerate(update_list))
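
The triple nested loop above can also be flattened with itertools.product, which keeps the grid definition in one place as more hyper-parameters are added. The values below are illustrative, not taken from the script:

from itertools import product

train_size_list = [100, 200]       # illustrative values
n_components_list = [16, 64]
alpha_list = [1e-3, 1e-4]

update_list = [{'task_data': {'train_size': 778},
                'rest_data': {'train_size': train_size},
                'components': {'n_components': n_components,
                               'alpha': alpha}}
               for train_size, n_components, alpha
               in product(train_size_list, n_components_list, alpha_list)]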