# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def plot_connectivity(dataset, run, algs,):
    """
    Plot the average inferred connectivity for a fitted dataset/run.

    :param dataset: name of the dataset; results are read from results/<dataset>
    :param run:     integer run index (results/<dataset>/run%03d)
    :param algs:    collection of algorithm names; "bfgs" takes precedence
                    over "gibbs" when computing the connectivity matrix
    """
    # Load the data and results
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    results = load_results(dataset, run, algs)
    # samples = results["gibbs"][0]

    ###########################################################
    # Get the average connectivity
    ###########################################################
    if "bfgs" in algs:
        # NOTE(review): despite the "average" heading this is a sum over
        # axis 2 (presumably the basis dimension) -- confirm intent.
        W_mean = results["bfgs"].W.sum(2)
    elif "gibbs" in algs:
        # Keep only the second half of the Gibbs samples (burn-in discard).
        W_samples = [smpl.weight_model.W_effective
                     for smpl in results["gibbs"][0]]
        offset = len(W_samples) // 2
        W_samples = np.array(W_samples[offset:])
        # NOTE(review): the visible code never reduces W_samples to a mean;
        # the remainder of this function appears truncated in this file.
def fit_with_gibbs(dataset, run, seed=None):
"""
Fit the dataset using Gibbs sampling
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
M = true_model.M
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
def fit_with_gibbs(dataset, run, seed=None):
"""
Fit the dataset using Gibbs sampling
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
M = true_model.M
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
def demo(dataset="rgc_nb_eigen_300T", run=1, seed=None):
"""
Fit a weakly sparse
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
train, test, _ = load_data(dataset)
train = train.astype(np.int32)
test = test.astype(np.int32)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = train.shape[1]
B = 5
dt = 1.0
dt_max = 10.0
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
def fit_with_gibbs(dataset, run, seed=None):
"""
Fit the dataset using Gibbs sampling
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
def fit_with_gibbs(dataset, run, seed=None):
"""
Fit the dataset using Gibbs sampling
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
M = true_model.M
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
def fit_with_svi(dataset, run, seed=None):
"""
Fit the dataset using SVI
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
###########################################################
# Create a test spike-and-slab model
def demo(dataset="rgc_nb_eigen_300T", run=1, seed=None):
"""
Fit a weakly sparse
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
train, test, _ = load_data(dataset)
train = train.astype(np.int32)
test = test.astype(np.int32)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
standard_results = load_results(dataset, run=run,
algorithms=["bfgs"])
T = train.shape[0]
N = train.shape[1]
B = 5
dt = 1.0
dt_max = 10.0
# Create and fit a standard model for initialization
init_model = standard_results["bfgs"]
def fit_with_bfgs(dataset, run, seed=None):
"""
Fit a weakly sparse
:return:
"""
if seed is None:
seed = np.random.randint(2**32)
print "Setting seed to ", seed
np.random.seed(seed)
###########################################################
# Load some example data.
# See data/synthetic/generate.py to create more.
###########################################################
train, test, true_model = load_data(dataset)
res_dir = os.path.join("results", dataset, "run%03d" % run)
assert os.path.exists(res_dir), "Results directory does not exist: " + res_dir
T = train.shape[0]
N = true_model.N
B = true_model.B
dt = true_model.dt
dt_max = true_model.dt_max
###########################################################
# Create a test spike-and-slab model
###########################################################
# Copy the network hypers.
test_model = StandardNegativeBinomialPopulation(N=N, xi=10, dt=dt, dt_max=dt_max, B=B,