How to use the pyglm.utils.experiment_helper.load_data function in PyGLM

To help you get started, we've selected a few PyGLM examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from github.com/slinderman/pyglm — experiments/rgc/inferred_connectivity.py (view on GitHub)
def plot_connectivity(dataset, run, algs,):
    """
    Plot the inferred connectivity for a dataset/run across the requested
    algorithms.

    NOTE(review): this snippet is truncated in the excerpt — the code that
    actually plots (and any use of `res_dir`, `train`, `test`, `true_model`)
    is not shown here.

    :param dataset: name of the dataset, passed to load_data/load_results
    :param run: integer run index, used to build the results directory name
    :param algs: collection of algorithm names (e.g. "bfgs", "gibbs")
    """
    # Load the data and results
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    results = load_results(dataset, run, algs)

    # samples = results["gibbs"][0]

    ###########################################################
    # Get the average connectivity
    ###########################################################
    if "bfgs" in algs:
        # MAP/point-estimate result: sum the weight tensor over its
        # third axis to get an N x N connectivity matrix.
        W_mean = results["bfgs"].W.sum(2)
    elif "gibbs" in algs:
        # Posterior samples: collect the effective weight matrix from
        # each Gibbs sample in the chain.
        W_samples = [smpl.weight_model.W_effective
                         for smpl in results["gibbs"][0]]

        # Discard the first half of the chain as burn-in before averaging.
        offset = len(W_samples) // 2
        W_samples = np.array(W_samples[offset:])
Example from github.com/slinderman/pyglm — experiments/switching/fit_empty_hsmm.py (view on GitHub)
def fit_with_gibbs(dataset, run, seed=None):
    """
    Fit the dataset using Gibbs sampling.

    NOTE(review): this snippet is truncated in the excerpt — the model
    construction and sampling loop that follow `init_model` are not shown.

    :param dataset: dataset name passed to load_data/load_results
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, so the run can be
    # reproduced from the printed value.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it.
    T      = train.shape[0]
    N      = true_model.N
    M      = true_model.M
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]

    ###########################################################
Example from github.com/slinderman/pyglm — experiments/switching/fit_empty_hmm.py (view on GitHub)
def fit_with_gibbs(dataset, run, seed=None):
    """
    Fit the dataset using Gibbs sampling.

    NOTE(review): this snippet is truncated in the excerpt — the model
    construction and sampling loop that follow `init_model` are not shown.
    It is near-identical to the fit_empty_hsmm.py variant of this function.

    :param dataset: dataset name passed to load_data/load_results
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it.
    T      = train.shape[0]
    N      = true_model.N
    M      = true_model.M
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]

    ###########################################################
Example from github.com/slinderman/pyglm — experiments/rgc/fit_rgc_nb_eigen_vb.py (view on GitHub)
def demo(dataset="rgc_nb_eigen_300T", run=1, seed=None):
    """
    Fit a weakly sparse model to RGC data (variational Bayes variant).

    NOTE(review): this snippet is truncated in the excerpt — the model
    construction and fitting code that follow `init_model` are not shown.

    :param dataset: dataset name; defaults to the 300T RGC eigenmodel data
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    # The ground-truth model is unused for real RGC data; only the spike
    # count matrices are needed. Cast to int32 for the count model.
    train, test, _ = load_data(dataset)
    train = train.astype(np.int32)
    test = test.astype(np.int32)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions from the data; basis/time-step hyperparameters
    # are hard-coded here rather than read from a true model.
    T      = train.shape[0]
    N      = train.shape[1]
    B      = 5
    dt     = 1.0
    dt_max = 10.0

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]
Example from github.com/slinderman/pyglm — experiments/synthetic/fit_mcmc.py (view on GitHub)
def fit_with_gibbs(dataset, run, seed=None):
    """
    Fit the dataset using Gibbs sampling.

    NOTE(review): this snippet is truncated in the excerpt — the
    spike-and-slab model construction and sampling loop are not shown.

    :param dataset: dataset name passed to load_data/load_results
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it. (No M here, unlike the
    # switching-model variants of this function.)
    T      = train.shape[0]
    N      = true_model.N
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]

    ###########################################################
    # Create a test spike-and-slab model
Example from github.com/slinderman/pyglm — experiments/switching/fit_er_hsmm.py (view on GitHub)
def fit_with_gibbs(dataset, run, seed=None):
    """
    Fit the dataset using Gibbs sampling.

    NOTE(review): this snippet is truncated in the excerpt — the model
    construction and sampling loop that follow `init_model` are not shown.
    It is near-identical to the fit_empty_hsmm.py variant of this function.

    :param dataset: dataset name passed to load_data/load_results
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it.
    T      = train.shape[0]
    N      = true_model.N
    M      = true_model.M
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]

    ###########################################################
Example from github.com/slinderman/pyglm — experiments/synthetic/fit_svi.py (view on GitHub)
def fit_with_svi(dataset, run, seed=None):
    """
    Fit the dataset using SVI (stochastic variational inference).

    NOTE(review): this snippet is truncated in the excerpt — the
    spike-and-slab model construction and the SVI loop are not shown.

    :param dataset: dataset name passed to load_data/load_results
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it.
    T      = train.shape[0]
    N      = true_model.N
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]

    ###########################################################
    # Create a test spike-and-slab model
Example from github.com/slinderman/pyglm — experiments/rgc/fit_rgc_nb_eigen_svi.py (view on GitHub)
def demo(dataset="rgc_nb_eigen_300T", run=1, seed=None):
    """
    Fit a weakly sparse model to RGC data (SVI variant).

    NOTE(review): this snippet is truncated in the excerpt — the model
    construction and fitting code that follow `init_model` are not shown.
    It is near-identical to the fit_rgc_nb_eigen_vb.py variant.

    :param dataset: dataset name; defaults to the 300T RGC eigenmodel data
    :param run: integer run index; results are read from results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    # The ground-truth model is unused for real RGC data; only the spike
    # count matrices are needed. Cast to int32 for the count model.
    train, test, _ = load_data(dataset)
    train = train.astype(np.int32)
    test = test.astype(np.int32)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
    # Load the previously-fit standard (BFGS) model for initialization.
    standard_results = load_results(dataset, run=run,
                                    algorithms=["bfgs"])

    # Problem dimensions from the data; basis/time-step hyperparameters
    # are hard-coded here rather than read from a true model.
    T      = train.shape[0]
    N      = train.shape[1]
    B      = 5
    dt     = 1.0
    dt_max = 10.0

    # Create and fit a standard model for initialization
    init_model = standard_results["bfgs"]
Example from github.com/slinderman/pyglm — experiments/synthetic/fit_bfgs.py (view on GitHub)
def fit_with_bfgs(dataset, run, seed=None):
    """
    Fit a weakly sparse model with BFGS (the "standard" baseline fit).

    NOTE(review): this snippet is truncated in the excerpt — the
    StandardNegativeBinomialPopulation constructor call at the end is cut
    off mid-argument-list, and the actual fitting code is not shown.

    :param dataset: dataset name passed to load_data
    :param run: integer run index; results are written under results/<dataset>/run%03d
    :param seed: optional RNG seed; a random one is drawn (and printed) if None
    :return:
    """
    # Draw and record a random seed when none is given, for reproducibility.
    if seed is None:
        seed = np.random.randint(2**32)

    # NOTE(review): Python 2 print statement — this code predates Python 3.
    print "Setting seed to ", seed
    np.random.seed(seed)

    ###########################################################
    # Load some example data.
    # See data/synthetic/generate.py to create more.
    ###########################################################
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir


    # Problem dimensions: T from the training data, the rest from the
    # ground-truth model that generated it.
    T      = train.shape[0]
    N      = true_model.N
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    ###########################################################
    # Create a test spike-and-slab model
    ###########################################################

    # Copy the network hypers.
    # NOTE(review): call truncated here in the excerpt.
    test_model = StandardNegativeBinomialPopulation(N=N, xi=10, dt=dt, dt_max=dt_max, B=B,