How to use the numpy.dot function

To help you get started, we’ve selected a few numpy.dot examples based on popular ways the function is used in public projects.
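
Before diving into the project snippets, here is a minimal sketch of the three most common numpy.dot call patterns (all values below are made up for illustration): a 1-D/1-D call is an inner product, a 2-D/1-D call is a matrix-vector product, and a 2-D/2-D call is a matrix multiplication, equivalent to the @ operator.

import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = np.array([4.0, 5.0, 6.0])
M = np.arange(9.0).reshape(3, 3)

print(np.dot(a, b))        # inner product of two 1-D arrays: 32.0
print(np.dot(M, a))        # matrix-vector product, shape (3,)
print(np.dot(M, M))        # matrix-matrix product, same as M @ M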


github sambler / myblenderaddons / power_sequencer / operators / audiosync / mfcc / mfcc.py
    nfil = nlinfil + nlogfil

    w = hamming(nwin, sym=0)

    fbank = trfbank(fs, nfft, lowfreq, linsc, logsc, nlinfil, nlogfil)[0]

    # ------------------
    # Compute the MFCC
    # ------------------
    extract = lfilter([1.0, -prefac], 1, input)
    framed = segment_axis(extract, nwin, over) * w

    # Compute the spectrum magnitude
    spec = np.abs(fft(framed, nfft, axis=-1))
    # Filter the spectrum through the triangle filterbank
    mspec = np.log10(np.dot(spec, fbank.T))
    # Use the DCT to 'compress' the coefficients (spectrum -> cepstrum domain)
    ceps = dct(mspec, type=2, norm="ortho", axis=-1)[:, :nceps]

    return ceps, mspec, spec
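
In the snippet above, np.dot(spec, fbank.T) applies every triangular filter to every frame in a single matrix product: spec has shape (n_frames, nfft) and fbank, assuming trfbank returns one filter per row as the transpose suggests, has shape (n_filters, nfft), so the result is (n_frames, n_filters). A self-contained sketch of that pattern with made-up shapes and random data:

import numpy as np

n_frames, nfft, n_filters = 4, 512, 20
spec = np.abs(np.random.randn(n_frames, nfft))   # magnitude spectrum, one row per frame
fbank = np.random.rand(n_filters, nfft)          # one triangular filter per row

# Each output element is a filter's weighted sum over one frame's FFT bins.
mspec = np.log10(np.dot(spec, fbank.T))
print(mspec.shape)                               # (4, 20)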
github lebedov / scikit-cuda / demos / dot_demo.py
    if np.iscomplexobj(t()):
        a = np.asarray(np.random.rand(10, 5)+1j*np.random.rand(10, 5), t)
        b = np.asarray(np.random.rand(5, 5)+1j*np.random.rand(5, 5), t)
        c = np.asarray(np.random.rand(5, 5)+1j*np.random.rand(5, 5), t)
    else:
        a = np.asarray(np.random.rand(10, 5), t)
        b = np.asarray(np.random.rand(5, 5), t)
        c = np.asarray(np.random.rand(5, 5), t)

    a_gpu = gpuarray.to_gpu(a)
    b_gpu = gpuarray.to_gpu(b)
    c_gpu = gpuarray.to_gpu(c)

    temp_gpu = culinalg.dot(a_gpu, b_gpu)
    d_gpu = culinalg.dot(temp_gpu, c_gpu)
    temp_gpu.gpudata.free()
    del temp_gpu
    print('Success status: ', np.allclose(np.dot(np.dot(a, b), c), d_gpu.get()))

    print('Testing vector multiplication for type ' + str(np.dtype(t)))
    if np.iscomplexobj(t()):
        d = np.asarray(np.random.rand(5)+1j*np.random.rand(5), t)
        e = np.asarray(np.random.rand(5)+1j*np.random.rand(5), t)
    else:
        d = np.asarray(np.random.rand(5), t)
        e = np.asarray(np.random.rand(5), t)

    d_gpu = gpuarray.to_gpu(d)
    e_gpu = gpuarray.to_gpu(e)

    temp = culinalg.dot(d_gpu, e_gpu)
    print('Success status: ', np.allclose(np.dot(d, e), temp))
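
The demo checks each GPU result against plain numpy: np.dot(np.dot(a, b), c) chains two matrix products, and np.dot(d, e) on two 1-D arrays is an inner product that returns a scalar. A CPU-only sketch of those reference computations (array sizes mirror the demo; no pycuda or scikit-cuda required):

import numpy as np

a = np.random.rand(10, 5)
b = np.random.rand(5, 5)
c = np.random.rand(5, 5)

# Chained matrix products; equivalent to a @ b @ c
abc = np.dot(np.dot(a, b), c)
print(abc.shape)                      # (10, 5)
print(np.allclose(abc, a @ b @ c))    # True

d = np.random.rand(5)
e = np.random.rand(5)
print(np.dot(d, e))                   # inner product of two 1-D arrays, a scalar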
github vasole / pymca / PyMca / SixCircle.py
        :param delta: angle in Degrees
        :type delta: float or numpy.ndarray
        :param gamma: angle in Degrees
        :type gamma: float or numpy.ndarray
        :param gamma_first: if delta and gamma are arrays, which one varies first.
        :type gamma_first: boolean

        :return: Q values for all the given delta, gamma values

        This is only true if the diffractometer has been properly aligned.
        """
        PHIi = self.getPhiMatrix(phi).T
        CHIi = self.getChiMatrix(chi).T
        THi  = self.getThetaMatrix(theta).T
        MUi   = self.getMuMatrix(mu).T
        tmpArray = numpy.dot(PHIi, numpy.dot(CHIi, numpy.dot(THi, MUi)))
        Q = self.getQLab(mu=mu, delta=delta, gamma=gamma, gamma_first=gamma_first)
        Q.shape = 3, -1
        return (numpy.dot(tmpArray, Q))
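
The pattern here is composing several 3x3 rotation matrices with nested numpy.dot calls, then applying the combined matrix to a (3, N) block of column vectors in one call. A minimal sketch of that pattern, with an arbitrary rotation helper standing in for the diffractometer matrices:

import numpy as np

def rot_z(angle):
    c, s = np.cos(angle), np.sin(angle)
    return np.array([[c, -s, 0.0], [s, c, 0.0], [0.0, 0.0, 1.0]])

A = rot_z(0.3).T
B = rot_z(-1.1).T
combined = np.dot(A, B)          # compose the (transposed) rotations

Q = np.random.rand(3, 7)         # 7 column vectors
Q_rotated = np.dot(combined, Q)  # rotate all of them in one call
print(Q_rotated.shape)           # (3, 7)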
github xiaqiong / PC-CCA / PC-CCA.py
def pls1_nipals(X, y, a):
    T = zeros((X.shape[0], a))
    P = zeros((X.shape[1], a))
    Q = zeros((1, a))
    W = zeros((X.shape[1], a))
    for i in range(a):
        v = dot(X.T, y[:, 0])
        
        W[:, i] = v/norm(v)
        T[:, i] = dot(X, W[:, i])
        P[:, i] = dot(X.T, T[:, i])/dot(T[:, i].T, T[:, i])
        Q[0, i] = dot(T[:, i].T, y[:, 0])/dot(T[:, i].T, T[:, i])
        X = X-outer(T[:, i], P[:, i])
    W = dot(W, inv(dot(P.T, W)))
    B = dot(W[:, 0:a], Q[:, 0:a].T)
    return {'B': B, 'T': T, 'P': P, 'Q': Q, 'W': W}
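
This snippet assumes zeros, dot and outer come from numpy and inv, norm from numpy.linalg (or equivalent module-level imports). The core of each NIPALS iteration is a pair of projections done with dot: scores t = Xw and loadings p = Xᵀt / (tᵀt), followed by deflation with an outer product. A standalone sketch of one such iteration on toy data:

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(30, 8))
y = rng.normal(size=(30, 1))

w = np.dot(X.T, y[:, 0])
w /= np.linalg.norm(w)                 # unit-norm weight vector
t = np.dot(X, w)                       # scores: projection of the samples onto w
p = np.dot(X.T, t) / np.dot(t, t)      # loadings: regression of X onto the scores
X_deflated = X - np.outer(t, p)        # remove the explained part before the next component
print(t.shape, p.shape)                # (30,) (8,)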
github nipy / dipy / dipy / sims / voxel.py
"""
    if evals is None:
        evals = diffusion_evals

    if evecs is None:
        evecs = np.eye(3)

    out_shape = r.shape[:r.ndim - 1]

    R = np.asarray(evecs)
    D = dot(dot(R, np.diag(evals)), R.T)
    Di = np.linalg.inv(D)
    r = r.reshape(-1, 3)
    P = np.zeros(len(r))
    for (i, u) in enumerate(r):
        P[i] = (-dot(dot(u.T, Di), u)) / (4 * tau)

    pdf = (1 / np.sqrt((4 * np.pi * tau) ** 3 * np.prod(evals))) * np.exp(P)

    return pdf.reshape(out_shape)
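
Inside the loop, dot(dot(u.T, Di), u) evaluates the quadratic form uᵀD⁻¹u for one displacement vector at a time (again assuming dot is imported from numpy). A small sketch of that pattern with an illustrative, made-up tensor, plus an einsum one-liner that does the same thing for a whole batch of vectors without the Python loop:

import numpy as np

D = np.diag([1.7e-3, 0.3e-3, 0.3e-3])     # illustrative diagonal tensor
Di = np.linalg.inv(D)
u = np.array([1.0, 0.5, -0.2])

q = np.dot(np.dot(u, Di), u)               # quadratic form u^T D^-1 u
print(q)

# Same quadratic form for many vectors at once, without looping in Python
r = np.random.rand(100, 3)
q_all = np.einsum('ij,jk,ik->i', r, Di, r)
print(np.allclose(q_all[0], np.dot(np.dot(r[0], Di), r[0])))   # True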
github carla-simulator / scenario_runner / srunner / tools / scenario_helper.py
        if len(wp_choice) > 1:
            reached_junction = True
            waypoint = choose_at_junction(waypoint, wp_choice, turn)
        else:
            waypoint = wp_choice[0]
        plan.append((waypoint, RoadOption.LANEFOLLOW))
        #   End condition for the behavior
        if turn != 0 and reached_junction and len(plan) >= 3:
            v_1 = vector(
                plan[-2][0].transform.location,
                plan[-1][0].transform.location)
            v_2 = vector(
                plan[-3][0].transform.location,
                plan[-2][0].transform.location)
            angle_wp = math.acos(
                np.dot(v_1, v_2) / abs((np.linalg.norm(v_1) * np.linalg.norm(v_2))))
            if angle_wp < threshold:
                break
        elif reached_junction and not plan[-1][0].is_intersection:
            break

    return plan, plan[-1][0]
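
The angle test above is the standard cos θ = (v₁·v₂) / (‖v₁‖‖v₂‖) formula evaluated with np.dot and np.linalg.norm. One caveat worth knowing: floating-point rounding can push the ratio slightly outside [-1, 1] and make math.acos raise a ValueError, so a clipped variant is often safer. A minimal sketch (the helper name is illustrative, not part of the project):

import math
import numpy as np

def angle_between(v_1, v_2):
    """Angle in radians between two vectors, robust to rounding error."""
    cos_theta = np.dot(v_1, v_2) / (np.linalg.norm(v_1) * np.linalg.norm(v_2))
    return math.acos(np.clip(cos_theta, -1.0, 1.0))

print(angle_between(np.array([1.0, 0.0]), np.array([0.0, 2.0])))   # ~1.5708 (90 degrees)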
github DamCB / tyssue / tyssue / topology / bulk_topology.py
    if plane_center is None:
        plane_center = eptm.cell_df.loc[mother, eptm.coords]

    n_xy = np.linalg.norm(plane_normal[:2])
    theta = -np.arctan2(n_xy, plane_normal[2])
    if np.linalg.norm(plane_normal[:2]) < 1e-10:
        rot = None
    else:
        direction = [plane_normal[1], -plane_normal[0], 0]
        rot = rotation_matrix(theta, direction)
    cell_verts = set(eptm.edge_df[eptm.edge_df["cell"] == mother]["srce"])
    vert_pos = eptm.vert_df.loc[cell_verts, eptm.coords]
    for coord in eptm.coords:
        vert_pos[coord] -= plane_center[coord]
    if rot is not None:
        vert_pos[:] = np.dot(vert_pos, rot)

    mother_edges = eptm.edge_df[eptm.edge_df["cell"] == mother]
    srce_z = vert_pos.loc[mother_edges["srce"], "z"]
    srce_z.index = mother_edges.index
    trgt_z = vert_pos.loc[mother_edges["trgt"], "z"]
    trgt_z.index = mother_edges.index
    division_edges = mother_edges[((srce_z < 0) & (trgt_z >= 0))]

    # Order the returned edges so that their centers
    # are oriented counterclockwise in the division plane
    # in preparation for septum creation
    srce_pos = vert_pos.loc[division_edges["srce"], eptm.coords].values
    trgt_pos = vert_pos.loc[division_edges["trgt"], eptm.coords].values
    centers = (srce_pos + trgt_pos) / 2
    theta = np.arctan2(centers[:, 1], centers[:, 0])
    return division_edges.iloc[np.argsort(theta)].index
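
np.dot(vert_pos, rot) transforms an (N, 3) block of row vectors with a single 3x3 matrix: each row v becomes v·rot, which is the same as applying rotᵀ to the corresponding column vector. A small sketch of that row-vector convention with an illustrative rotation about z:

import numpy as np

theta = np.pi / 4
rot = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                [np.sin(theta),  np.cos(theta), 0.0],
                [0.0,            0.0,           1.0]])

points = np.random.rand(10, 3)            # ten (x, y, z) positions as rows
rotated = np.dot(points, rot)             # each row v is replaced by v @ rot

# Same result as applying rot.T to column vectors
print(np.allclose(rotated, (rot.T @ points.T).T))   # True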
github mthomure / glimpse-project / glimpse / util / stats.py
  .. seealso::
     This function was adapted from `similar code by Jan Erik Solem
     `_.
     Also see :func:`sklearn.decomposition.PCA`.

  """
  if len(X.shape) != 2:
    raise Exception("Training data must be a matrix")
  mean = X.mean(0)
  X = X - mean
  # Find covariance matrix of X - mu
  cov = np.dot(X, X.T)
  # Find eigenvectors of symmetric covariance matrix
  eigenvalues, eigenvectors = np.linalg.eigh(cov)
  # Full transformation
  transform = np.dot(X.T, eigenvectors).T
  # Reorder transformation by descending eigenvalue.
  order = np.argsort(eigenvalues)[::-1]
  transform = transform[ order ]
  # Any negative eigenvalues are effectively zero; the negative sign is caused
  # by numerical approximation error.
  eigenvalues[ eigenvalues < 0 ] = 0
  stdev = np.sqrt(eigenvalues)[ order ]
  return transform, stdev
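
The two np.dot calls implement the compact-data PCA trick: with far fewer samples than features, the small (n_samples, n_samples) matrix X·Xᵀ is eigendecomposed instead of the full feature-space covariance, and X.T @ eigenvectors maps its eigenvectors back to feature space. A rough sketch of the shapes involved (sizes are arbitrary):

import numpy as np

n_samples, n_features = 20, 1000
X = np.random.rand(n_samples, n_features)
X = X - X.mean(axis=0)

cov_small = np.dot(X, X.T)                         # (20, 20) instead of (1000, 1000)
eigenvalues, eigenvectors = np.linalg.eigh(cov_small)

components = np.dot(X.T, eigenvectors).T           # (20, 1000): one component per row
order = np.argsort(eigenvalues)[::-1]
components = components[order]
print(components.shape)                            # (20, 1000)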
github joshuaskelly / tmx2map / tmx2map / mathhelper.py
def angle_between(dest, base=(1,0,0)):
    """Returns the angle in positive degrees from base to dest in the
    xy-plane.

    dest: A vector
    base: A vector

    Returns:
        Angle in degrees [0, 360)
    """

    target = dest[0], dest[1]
    p_axis = -base[1], base[0]
    b_axis = base[0], base[1]

    x_proj = numpy.dot(target, b_axis)
    y_proj = numpy.dot(target, p_axis)

    result = math.degrees(math.atan2(y_proj, x_proj))

    return (result + 360) % 360
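
Each numpy.dot call here is a 2-D projection: x_proj is the component of the target along the base axis and y_proj its component along the perpendicular axis, so atan2(y_proj, x_proj) recovers the signed angle. A quick worked example of the same arithmetic (values chosen for illustration):

import math
import numpy

dest = (0.0, 1.0, 0.0)                     # points straight along +y
base = (1.0, 0.0, 0.0)

x_proj = numpy.dot((dest[0], dest[1]), (base[0], base[1]))
y_proj = numpy.dot((dest[0], dest[1]), (-base[1], base[0]))
print((math.degrees(math.atan2(y_proj, x_proj)) + 360) % 360)   # 90.0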
github leon-nn / face-fitting / mm.py
    # Mapping from (uz, vz, z) to (x, y, z)
    pixel2real = np.linalg.inv(real2pixel)
    
    # Mark depth values that are non-zero
    nonZeroZ = d[:, 2] != 0
    
    if not inverse:
        uvz = d[nonZeroZ, :]
        uzvzz = np.c_[np.prod(uvz[:, ::2], axis = 1), np.prod(uvz[:, 1:], axis = 1), uvz[:, 2]]
        xyz = np.dot(pixel2real, uzvzz.T).T
        
        return xyz, nonZeroZ
    
    else:
        xyz = d[nonZeroZ, :]
        uzvzz = np.dot(real2pixel, xyz.T).T
        uvz = np.c_[uzvzz[:, 0] / xyz[:, 2], uzvzz[:, 1] / xyz[:, 2], xyz[:, 2]]
        
        return uvz, nonZeroZ
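
Both branches rely on the same idiom: np.dot(matrix, points.T).T applies a 3x3 transform to every row of an (N, 3) array at once. A compact sketch with an arbitrary invertible matrix standing in for real2pixel (the values are made up):

import numpy as np

real2pixel = np.array([[800.0, 0.0, 320.0],    # illustrative intrinsic-style matrix
                       [0.0, 800.0, 240.0],
                       [0.0,   0.0,   1.0]])
pixel2real = np.linalg.inv(real2pixel)

xyz = np.random.rand(6, 3) + np.array([0.0, 0.0, 1.0])   # points with nonzero depth
uzvzz = np.dot(real2pixel, xyz.T).T                       # forward transform, row by row
back = np.dot(pixel2real, uzvzz.T).T                      # and back again
print(np.allclose(back, xyz))                             # True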