How to use scipy - 10 common examples

To help you get started, we've selected a few scipy examples that show popular ways the library is used in public projects.


Example 1: prannayk/videoMultiGAN, acrcn/step_2_gen_anneal.py (view on GitHub)
def save_visualization(X, nh_nw=(batch_size, 2 + frames), save_path='../results/%s/sample.jpg' % (sys.argv[4])):
    # tile a batch of frames into an nh_nw[0] x nh_nw[1] grid and save it as one image
    X = morph(X)
    print(X.shape)
    h, w = X.shape[1], X.shape[2]
    img = np.zeros((h * nh_nw[0], w * nh_nw[1], 3))

    for n, x in enumerate(X):
        j = n // nh_nw[1]
        i = n % nh_nw[1]
        img[j*h:j*h+h, i*w:i*w+w, :] = x[:, :, :3]
    # save the raw array alongside the rendered image
    np.save("%s.npy" % save_path.split(".")[0], img)
    scipy.misc.imsave(save_path, img)
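The scipy call in this snippet is scipy.misc.imsave, which writes a NumPy array straight to an image file. scipy.misc.imsave has since been deprecated and removed from recent SciPy releases, so here is a minimal sketch of the same step (file name and array are made up); imageio.imwrite is the usual replacement:

import numpy as np
import imageio

# dummy 64x64 RGB image standing in for the tiled batch built above
img = (np.random.rand(64, 64, 3) * 255).astype(np.uint8)
imageio.imwrite('sample.jpg', img)   # plays the role scipy.misc.imsave plays in the snippet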
Example 2: pysal/pysal, pysal/model/spreg/diagnostics.py (view on GitHub)
    # Check for constant, if none add one, see Greene 2003, pg. 222
    # if constant == False:
    #    X = np.hstack((np.ones((n,1)),X))

    # Check for multicollinearity in the X matrix
    ci = condition_index(reg)
    if ci > 30:
        white_result = "Not computed due to multicollinearity."
        return white_result

    # Compute cross-products and squares of the regression variables
    if type(X).__name__ == 'ndarray':
        A = np.zeros((n, (k * (k + 1)) // 2))
    elif type(X).__name__ == 'csc_matrix' or type(X).__name__ == 'csr_matrix':
        # this is probably inefficient
        A = SP.lil_matrix((n, (k * (k + 1)) // 2))
    else:
        raise Exception("unknown X type, %s" % type(X).__name__)
    counter = 0
    for i in range(k):
        for j in range(i, k):
            v = spmultiply(X[:, i], X[:, j], False)
            A[:, counter] = v
            counter += 1

    # Append the original variables
    A = sphstack(X, A)   # note: this also converts a LIL to CSR
    n, k = A.shape

    # Check to identify any duplicate or constant columns in A
    omitcolumn = []
    for i in range(k):
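The scipy.sparse usage is the interesting part here: a lil_matrix is preallocated because LIL is cheap to fill column by column, and the result is later stacked against the original design matrix (sphstack wraps scipy.sparse.hstack). A minimal self-contained sketch of the same pattern, with made-up shapes and without the spreg helpers:

import numpy as np
from scipy import sparse

n, k = 100, 3
X = sparse.random(n, k, density=0.3, format='csr')   # stand-in design matrix
A = sparse.lil_matrix((n, k * (k + 1) // 2))          # LIL is cheap to fill column by column
col = 0
for i in range(k):
    for j in range(i, k):
        # element-wise cross products of the regressors
        A[:, col] = X[:, i].multiply(X[:, j]).toarray()
        col += 1
A = sparse.hstack([X, A], format='csr')               # append the original variables, like sphstack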
Example 3: MarvinTeichmann/KittiBox, decoder/fastBox.py (view on GitHub)
def log_image(np_img, np_confidences, np_boxes, np_global_step,
              pred_or_true):

    # draw predicted/true boxes onto the image
    merged = train_utils.add_rectangles(hyp, np_img, np_confidences,
                                        np_boxes,
                                        use_stitching=True,
                                        rnn_len=hyp['rnn_len'])[0]

    num_images = 10

    # cycle through num_images file names so older logs get overwritten
    filename = '%s_%s.jpg' % \
        ((np_global_step // hyp['logging']['write_iter'])
            % num_images, pred_or_true)
    img_path = os.path.join(hyp['dirs']['output_dir'], filename)

    # scp is the module's alias for scipy
    scp.misc.imsave(img_path, merged)
    return merged
Example 4: flatironinstitute/CaImAn, caiman/source_extraction/cnmf/online_cnmf.py (view on GitHub)
                            for rp in range(len(self.estimates.ind_new)*2):
                                out.write(vid_frame)

                        cv2.imshow('frame', vid_frame)
                        for rp in range(len(self.estimates.ind_new)*2):
                            cv2.imshow('frame', vid_frame)
                        if cv2.waitKey(1) & 0xFF == ord('q'):
                            break
                    t += 1
                    t_online.append(time() - t_frame_start)
        
            self.Ab_epoch.append(self.estimates.Ab.copy())

        if self.params.get('online', 'normalize'):
            self.estimates.Ab /= 1./self.img_norm.reshape(-1, order='F')[:,np.newaxis]
            self.estimates.Ab = csc_matrix(self.estimates.Ab)
        self.estimates.A, self.estimates.b = self.estimates.Ab[:, self.params.get('init', 'nb'):], self.estimates.Ab[:, :self.params.get('init', 'nb')].toarray()
        self.estimates.C, self.estimates.f = self.estimates.C_on[self.params.get('init', 'nb'):self.M, t - t //
                         epochs:t], self.estimates.C_on[:self.params.get('init', 'nb'), t - t // epochs:t]
        noisyC = self.estimates.noisyC[self.params.get('init', 'nb'):self.M, t - t // epochs:t]
        self.estimates.YrA = noisyC - self.estimates.C
        if self.estimates.OASISinstances is not None:
            self.estimates.bl = [osi.b for osi in self.estimates.OASISinstances]
            self.estimates.S = np.stack([osi.s for osi in self.estimates.OASISinstances])
            self.estimates.S = self.estimates.S[:, t - t // epochs:t]
        else:
            self.estimates.bl = [0] * self.estimates.C.shape[0]
            self.estimates.S = np.zeros_like(self.estimates.C)
        if self.params.get('online', 'ds_factor') > 1:
            dims = Y_.shape[1:]
            self.estimates.A = hstack([coo_matrix(cv2.resize(self.estimates.A[:, i].reshape(self.dims, order='F').toarray(),
                                                            dims[::-1]).reshape(-1, order='F')[:,None]) for i in range(self.N)], format='csc')
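The scipy.sparse pieces in this fragment: the spatial matrix Ab is kept as a csc_matrix, components are pulled out by column slicing plus toarray(), and after downsampling the columns are reassembled with coo_matrix and hstack. A small illustrative sketch of that column-stacking pattern, with made-up shapes and footprints:

import numpy as np
from scipy.sparse import coo_matrix, hstack

n_pixels, n_components = 1000, 5
cols = []
for i in range(n_components):
    dense_col = np.zeros(n_pixels)
    dense_col[i::n_components] = 1.0              # dummy spatial footprint
    cols.append(coo_matrix(dense_col[:, None]))   # one (n_pixels, 1) sparse column per component
A = hstack(cols, format='csc')                    # assemble into a csc_matrix, as online_cnmf does
b = A[:, :2].toarray()                            # column slicing + densification, like Ab[:, :nb]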
Example 5: peng-cao/mripy, test/CS_MRI/cs_MRF_CNN_IST_cuda.py (view on GitHub)
def test():
    # read rf and tr arrays from mat file
    mat_contents  = sio.loadmat(pathdat+'mrf_t1t2b0pd_mrf_randphasecyc_traintest.mat')
    far           = np.array(mat_contents["rf"].astype(np.complex128).squeeze())
    trr           = np.array(mat_contents["trr"].astype(np.float64).squeeze())
    # input MRF time courses
    mat_contents2 = sio.loadmat(pathdat+'datax1.mat')
    data_x        = np.array(mat_contents2["datax1"]).astype(np.float64)
    # prepare for sequence simulation, y->x_hat
    Nk            = far.shape[0]
    Nexample      = data_x.shape[0]
    ti            = 10 #ms
    M0            = np.array([0.0,0.0,1.0]).astype(np.float64)
    #image size
    nx            = 217
    ny            = 181
    # mask in ksp
    mask          = ut.mask3d( nx, ny, Nk, [15,15,0], 0.4)
    #FTm           = opts.FFT2d_kmask(mask) 
    FTm           = cuopts.FFT2d_cuda_kmask(mask)
    
    #intial timing
    timing        = utc.timing()
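scipy.io.loadmat (imported as sio above) returns a dict keyed by the MATLAB variable names, with values delivered as NumPy arrays that often carry singleton dimensions worth squeezing away. A minimal sketch with an illustrative file and variable name:

import numpy as np
import scipy.io as sio

contents = sio.loadmat('mrf_data.mat')        # dict: variable name -> ndarray (file name illustrative)
rf = np.asarray(contents['rf']).squeeze()     # drop MATLAB's singleton dimensions
print(sorted(k for k in contents if not k.startswith('__')))  # loadmat also adds __header__ etc.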
Example 6: tompollard/tableone, test_tableone.py (view on GitHub)
def mytest(*args):
    """
    Hypothesis test for test_self_defined_statistical_tests
    """
    mytest.__name__ = "Test name"
    _, pval = stats.ks_2samp(*args)
    return pval
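stats.ks_2samp performs a two-sample Kolmogorov-Smirnov test and returns the test statistic together with the p-value, which is all this wrapper forwards to tableone. A quick illustrative run on synthetic data:

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
a = rng.normal(0.0, 1.0, size=200)
b = rng.normal(0.5, 1.0, size=200)
statistic, pval = stats.ks_2samp(a, b)   # a small p-value suggests the samples differ
print(statistic, pval)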
Example 7: christophmark/bayesloop, tests/test_observationmodels.py (view on GitHub)
def test_scipy_2p(self):
    # carry out fit
    S = bl.Study()
    S.loadData(np.array([1, 2, 3, 4, 5]))

    L = bl.om.SciPy(scipy.stats.norm, 'loc', bl.cint(0, 7, 200), 'scale', bl.oint(0, 1, 200))

    S.setOM(L)
    S.setTM(bl.tm.Static())
    S.fit()

    # test model evidence value
    np.testing.assert_almost_equal(S.logEvidence, -13.663836264357225, decimal=5,
                                   err_msg='Erroneous log-evidence value.')
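bl.om.SciPy wraps an arbitrary scipy.stats distribution as a bayesloop observation model; here it is scipy.stats.norm with its 'loc' and 'scale' parameters. For reference, the underlying scipy distribution object behaves like this (values are illustrative):

import numpy as np
import scipy.stats

dist = scipy.stats.norm(loc=3.0, scale=1.5)                  # frozen normal distribution
print(dist.pdf(np.array([1, 2, 3, 4, 5])))                   # likelihood of the data points
loc_hat, scale_hat = scipy.stats.norm.fit([1, 2, 3, 4, 5])   # maximum-likelihood loc/scale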
Example 8: analogdevicesinc/m1k-fw, scripts/testusb_plot.py (view on GitHub)
for line in fileinput.input():
  [o, va, ia, vb, ib] = [float(x.strip()) for x in line.split(',')]
  target.append(o)
  voltages_a.append(va)
  currents_a.append(ia)
  voltages_b.append(vb)
  currents_b.append(ib)

plot(voltages_a, '.', label='va')
plot(currents_a, '.', label='ia')
plot(voltages_b, '.', label='vb')
plot(currents_b, '.', label='ib')
plot(target, '.', label='dac')
xlabel('time (samples)')
ylabel('amplitude (bits)')
sio.savemat("smu.mat", {"v": voltages_a, "i": currents_a, "setpoint": target})
legend(loc='best')
figure()
semilogy(fftfreq(len(voltages_a), 2e-05), fft(voltages_a), '.')
semilogy(fftfreq(len(voltages_b), 2e-05), fft(voltages_b), '.')
semilogy(fftfreq(len(target), 2e-05), fft(target), '.')
savefig("svmi-fft.png")
show()
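Two scipy features do the work here: scipy.io.savemat writes a dict of arrays to a MATLAB-compatible .mat file, and fft/fftfreq (pulled in by a star import in the original script) compute the spectrum and its frequency axis. A self-contained sketch using the modern scipy.fft module, with a synthetic signal; the magnitude (np.abs) is what one normally plots on a log axis:

import numpy as np
import scipy.io as sio
from scipy.fft import fft, fftfreq

t = np.arange(0, 1, 2e-05)                             # 50 kHz sample rate, 1 s of data
signal = np.sin(2 * np.pi * 1000 * t)                  # 1 kHz tone
sio.savemat("smu.mat", {"v": signal, "setpoint": t})   # dict of name -> array
freqs = fftfreq(len(signal), d=2e-05)                  # frequency axis for the FFT bins
spectrum = np.abs(fft(signal))                         # magnitude spectrum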
Example 9: dmlc/dgl, tests/compute/test_traversal.py (view on GitHub)
            if u in layers_nx[-1]:
                frontier.add(v)
                edge_frontier.add(g.edge_id(u, v))
            else:
                layers_nx.append(frontier)
                edges_nx.append(edge_frontier)
                frontier = set([v])
                edge_frontier = set([g.edge_id(u, v)])
        # avoids empty successors
        if len(frontier) > 0 and len(edge_frontier) > 0:
            layers_nx.append(frontier)
            edges_nx.append(edge_frontier)
        return layers_nx, edges_nx

    g = dgl.DGLGraph()
    a = sp.random(n, n, 3 / n, data_rvs=lambda n: np.ones(n))
    g.from_scipy_sparse_matrix(a)
    g_nx = g.to_networkx()
    src = random.choice(range(n))
    layers_nx, _ = _bfs_nx(g_nx, src)
    layers_dgl = dgl.bfs_nodes_generator(g, src)
    assert len(layers_dgl) == len(layers_nx)
    assert all(toset(x) == y for x, y in zip(layers_dgl, layers_nx))

    g_nx = nx.random_tree(n, seed=42)
    g = dgl.DGLGraph()
    g.from_networkx(g_nx)
    src = 0
    _, edges_nx = _bfs_nx(g_nx, src)
    edges_dgl = dgl.bfs_edges_generator(g, src)
    assert len(edges_dgl) == len(edges_nx)
    assert all(toset(x) == y for x, y in zip(edges_dgl, edges_nx))
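The scipy call of interest is sp.random, i.e. scipy.sparse.random, which draws a random sparse matrix of a given density; data_rvs controls the values of the stored entries (here forcing them all to 1 so the matrix acts like an unweighted adjacency matrix). A short sketch:

import numpy as np
import scipy.sparse as sp

n = 10
# random sparse matrix with roughly 3 nonzeros per row,
# every stored value forced to 1.0 via data_rvs
a = sp.random(n, n, density=3 / n, format='coo', data_rvs=lambda size: np.ones(size))
print(a.nnz, a.toarray().sum())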
Example 10: chappers/scikit-feature, skfeature/example/test_fisher_score.py (view on GitHub)
def main():
    # load data
    mat = scipy.io.loadmat('../data/COIL20.mat')
    X = mat['X']    # data
    X = X.astype(float)
    y = mat['Y']    # label
    y = y[:, 0]
    n_samples, n_features = X.shape    # number of samples and number of features

    # split data into 10 folds (this uses scikit-learn's old cross_validation module;
    # newer releases offer the same functionality as sklearn.model_selection.KFold)
    ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)

    # perform evaluation on classification task
    num_fea = 100    # number of selected features
    clf = svm.LinearSVC()    # linear SVM

    correct = 0
    for train, test in ss:
        # obtain the score of each feature on the training set