How to use the scipy.interpolate.interp1d function

To help you get started, we’ve selected a few scipy.interpolate.interp1d examples based on popular ways the function is used in public projects.

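Before working through the project snippets below, here is a minimal, self-contained sketch of the basic interp1d workflow: build a callable from known sample points, then evaluate it at new x values. The data is made up purely for illustration.

import numpy as np
from scipy.interpolate import interp1d

# Known sample points (illustrative only)
x = np.linspace(0, 10, 11)
y = np.sin(x)

# interp1d returns a callable; kind defaults to 'linear'
f_linear = interp1d(x, y)
f_cubic = interp1d(x, y, kind='cubic')

x_new = np.linspace(0, 10, 101)
y_linear = f_linear(x_new)   # piecewise-linear estimate at the new points
y_cubic = f_cubic(x_new)     # smoother cubic-spline estimate

By default interp1d raises a ValueError when asked for points outside the original x range; pass bounds_error=False and a fill_value (including the string 'extrapolate') to change that, as several of the examples below do.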

github ChoSungMan / awesome-face-recognition / src / verification.py
    val = np.zeros(nrof_folds)
    far = np.zeros(nrof_folds)

    diff = np.subtract(embeddings1, embeddings2)
    dist = np.sum(np.square(diff), 1)
    indices = np.arange(nrof_pairs)

    for fold_idx, (train_set, test_set) in enumerate(k_fold.split(indices)):

        # Find the threshold that gives FAR = far_target
        far_train = np.zeros(nrof_thresholds)
        for threshold_idx, threshold in enumerate(thresholds):
            _, far_train[threshold_idx] = calculate_val_far(threshold, dist[train_set], actual_issame[train_set])
        if np.max(far_train) >= far_target:
            f = interpolate.interp1d(far_train, thresholds, kind='slinear')
            threshold = f(far_target)
        else:
            threshold = 0.0

        val[fold_idx], far[fold_idx] = calculate_val_far(threshold, dist[test_set], actual_issame[test_set])

    val_mean = np.mean(val)
    far_mean = np.mean(far)
    val_std = np.std(val)
    return val_mean, val_std, far_mean
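The trick to notice here is the inverted call interpolate.interp1d(far_train, thresholds, kind='slinear'): by passing the measured false-accept rates as x and the candidate thresholds as y, the interpolant maps a target FAR back to a threshold. A stripped-down sketch of that idea with made-up arrays (a real FAR curve should vary monotonically with the threshold for the inversion to be well defined):

import numpy as np
from scipy import interpolate

thresholds = np.linspace(0.0, 4.0, 9)    # candidate decision thresholds (illustrative)
far_train = np.linspace(0.0, 0.05, 9)    # FAR measured at each threshold (illustrative)
far_target = 1e-2

if np.max(far_train) >= far_target:
    # x and y are swapped so we can ask "which threshold gives this FAR?"
    f = interpolate.interp1d(far_train, thresholds, kind='slinear')
    threshold = float(f(far_target))
else:
    threshold = 0.0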
github georgebv / coastlib / coastlib / core / data_analysis_tools.py
        Value of val1 for which an associated value val2 is found
    search_range : float
        Range of val1 within which values of val2 will be extraced for analysis (half bin size from
        joint probability)
    confidence : float
        Confidence for associated value - shows probability of non-exceedance (default 0.5 - median value)
    plot_cdf : bool
        If True - display a CDF plot of val2 in range val1 ± search_range
    """

    df = df[pd.notnull(df[val1])]
    df = df[pd.notnull(df[val2])]
    target = df[(df[val1] >= value - search_range) & (df[val1] <= value + search_range)]
    kde = sm.nonparametric.KDEUnivariate(target[val2].values)
    kde.fit()
    fit = scipy.interpolate.interp1d(kde.cdf, kde.support, kind='linear')
    if plot_cdf:
        with plt.style.context('bmh'):
            plt.plot(kde.support, kde.cdf, lw=1, color='orangered')
            plt.title('CDF of {0} for {1} in range [{low} - {top}]'.
                      format(val2, val1, low=round(value - search_range, 2), top=round(value + search_range, 2)))
            plt.ylabel('CDF')
            plt.xlabel(val2)
            plt.annotate(r'{perc}% Associated value {0}={1}'.format(val2, round(fit(confidence).tolist(), 2),
                                                                   perc=confidence*100),
                         xy=(fit(confidence).tolist(), confidence),
                         xytext=(fit(0.5).tolist()+search_range, 0.5),
                         arrowprops=dict(facecolor='k', shrink=0.01))
    return fit(confidence).tolist()
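The same axis swap appears in this example: interp1d(kde.cdf, kde.support) maps a cumulative probability back to a value of val2, so fit(confidence) is approximately the requested quantile, and fit(0.5) the conditional median, of val2 within the value ± search_range window.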
github pymedphys / pymedphys / pymedphys / labs / tpscompare / normalisation.py
):
    """Normalise a profile given a defined normalisation position and
    normalisation scaling
    """
    # If scaling is to PDD interpolate along the PDD to find the scaling,
    # otherwise set scaling to 100.
    if scale_to_pdd:
        # If insufficient information has been supplied raise a meaningful
        # error
        if pdd_distance is None or pdd_relative_dose is None or scan_depth is None:
            raise Exception(
                "Scaling to PDD requires pdd_distance, pdd_relative_dose, "
                "and scan_depth to be defined."
            )

        pdd_interpolation = interp1d(pdd_distance, pdd_relative_dose)
        scaling = pdd_interpolation(scan_depth)
    else:
        scaling = 100

    # Linear interpolation function
    if smoothed_normalisation:
        filtered = savgol_filter(relative_dose, 21, 2)
        interpolation = interp1d(distance, filtered)
    else:
        interpolation = interp1d(distance, relative_dose)

    try:
        # Check if user wrote a number for normalisation position
        float_position = float(normalisation_position)
    except ValueError:
        # If text was written the conversion to float will fail
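One useful detail in this example is smoothing before interpolating: interp1d passes exactly through the samples it is given, so a noisy profile is first run through a Savitzky-Golay filter and the interpolant is built from the filtered values. A minimal sketch of that combination; the profile data and filter settings here are invented for illustration and are not the pymedphys API:

import numpy as np
from scipy.interpolate import interp1d
from scipy.signal import savgol_filter

distance = np.linspace(-100, 100, 201)                 # mm, illustrative
relative_dose = 100 * np.exp(-(distance / 60.0) ** 4)  # synthetic profile
relative_dose += np.random.default_rng(0).normal(0, 0.5, distance.size)  # add noise

# Smooth first (window of 21 samples, polynomial order 2), then interpolate
filtered = savgol_filter(relative_dose, 21, 2)
interpolation = interp1d(distance, filtered)

print(interpolation(0.0))   # value at the normalisation position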
github GeoscienceAustralia / tcrm / Utilities / interpTrack.py
    timestep = 24.0*(time_ - time_[0])

    newtime = numpy.arange(timestep[0], timestep[-1]+.01, delta)
    newtime[-1] = timestep[-1]
    _newtime = (newtime/24.) + time_[0]
    newdates = num2date(_newtime)

    nid = numpy.ones(newtime.size)

    logger.info("Interpolating data...")
    if len(indicator) <= 2:
        # Use linear interpolation only (only a start and end point given):
        nLon = scint.interp1d(timestep, lon, kind='linear')(newtime)
        nLat = scint.interp1d(timestep, lat, kind='linear')(newtime)
        npCentre = scint.interp1d(timestep, pressure, kind='linear')(newtime)
        npEnv = scint.interp1d(timestep, penv, kind='linear')(newtime)
        nrMax = scint.interp1d(timestep, rmax, kind='linear')(newtime)

    else:
        if interpolation_type == 'akima':
            # Use the Akima interpolation method:
            try:
                from . import _akima
            except ImportError:
                logger.exception(("Akima interpolation module unavailable - "
                                  "default to scipy.interpolate"))
                nLon = scint.splev(newtime, scint.splrep(timestep, lon, s=0), der=0)
                nLat = scint.splev(newtime, scint.splrep(timestep, lat, s=0), der=0)
            else:
                nLon = _akima.interpolate(timestep, lon, newtime)
                nLat = _akima.interpolate(timestep, lat, newtime)
        elif interpolation_type == 'linear':
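For very short tracks the example above builds and evaluates the interpolant in one expression, e.g. scint.interp1d(timestep, lon, kind='linear')(newtime). When several variables share the same time base, a small loop keeps that pattern readable; the track values below are invented for illustration:

import numpy as np
from scipy import interpolate as scint

timestep = np.array([0.0, 6.0, 12.0, 18.0, 24.0])   # hours, illustrative
track = {
    'lon': np.array([150.0, 150.5, 151.2, 152.0, 152.9]),
    'lat': np.array([-12.0, -12.3, -12.8, -13.4, -14.1]),
}
newtime = np.arange(timestep[0], timestep[-1] + 0.01, 1.0)   # hourly grid

resampled = {name: scint.interp1d(timestep, values, kind='linear')(newtime)
             for name, values in track.items()}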
github sao-eht / eat / bin / cal_apriori_pang_uvfits.py
                elev_fake_foo = get_elev_2(earthrot(xyz[site], thetas_fake), sourcevec)#ehtim
            else:
                elev_fake_foo = get_elev(ra, dec, xyz[site], strtime_fake)##astropy

            elevfit[site] = scipy.interpolate.interp1d(time_mjd_fake, elev_fake_foo,
                                                kind=elev_interp_kind)

        try:
            caltable.data[site]
        except KeyError:
            skipsites.append(site)
            print("No Calibration Data for %s!" % site)
            continue

        if skip_fluxcal: #if we don't do flux calibration don't waste time on serious interpolating
            rinterp[site] = scipy.interpolate.interp1d([0],[1],kind='zero',fill_value='extrapolate')
            linterp[site] = scipy.interpolate.interp1d([0],[1],kind='zero',fill_value='extrapolate')

        else: #default option, create interpolating station based SEFD gains
            time_mjd = caltable.data[site]['time']/24.0 + caltable.mjd
            rinterp[site] = scipy.interpolate.interp1d(time_mjd, caltable.data[site]['rscale'],
                                                    kind=interp, fill_value=fill_value)
            linterp[site] = scipy.interpolate.interp1d(time_mjd, caltable.data[site]['lscale'],
                                                    kind=interp, fill_value=fill_value)



    #-------------------------------------------
    # sort by baseline
    data =  datastruct.data
    idx = np.lexsort((data['t2'], data['t1']))
    bllist = []
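Note the shortcut used when flux calibration is skipped: the interpolants are built from a single point with kind='zero' and fill_value='extrapolate', which makes them behave as constant functions returning 1 for any time, so the downstream code can keep calling rinterp[site](t) and linterp[site](t) without special-casing the uncalibrated case. Whether a single-point interpolant is accepted can depend on the SciPy version, so this is worth re-checking after an upgrade.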
github yandex-load / volta / volta / listeners / sync / sync.py
def ref_signal(sync):
        """ Generate square reference signal """
        logger.info("Generating ref signal...")
        if len(sync) == 0:
            raise ValueError('Sync events not found.')
        f = interpolate.interp1d(sync["sample_offset"], sync["message"], kind="zero")
        X = np.linspace(0, sync["sample_offset"].values[-1], sync["sample_offset"].values[-1])
        rs = f(X)
        return rs - np.mean(rs)
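kind="zero" gives a piecewise-constant (zero-order-hold) interpolant, which is exactly what a square sync signal needs: the value stays at the last known message level until the next sync event. A compact sketch with invented sync offsets and levels:

import numpy as np
from scipy import interpolate

sample_offset = np.array([0, 100, 250, 400, 600])   # sample index of each sync event (illustrative)
message = np.array([0, 1, 0, 1, 0])                 # logic level at each event (illustrative)

f = interpolate.interp1d(sample_offset, message, kind="zero")
X = np.arange(sample_offset[-1] + 1)
rs = f(X)                    # step waveform, constant between events
rs = rs - np.mean(rs)        # remove the DC offset, as ref_signal() does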
github MITHaystack / digital_rf / python / examples / beacon / beacon_analysis.py
    thresh = 15
    # outlier removal
    keep0 = 10 * sp.log10(snr0) > thresh
    if keep0.sum() == 0:
        return False, 0
    dopfit0 = sp.interpolate.interp1d(
        tvec[keep0], freqm0[keep0], fill_value="extrapolate"
    )(tvec)
    dfmean0 = sp.mean(dopfit0)
    dfstd0 = sp.std(dopfit0)

    keep1 = 10 * sp.log10(snr1) > thresh
    if keep1.sum() == 0:
        return False, 0
    dopfit1 = sp.interpolate.interp1d(
        tvec[keep1], freqm1[keep1], fill_value="extrapolate"
    )(tvec)
    dfmean1 = sp.mean(dopfit1)
    dfstd1 = sp.std(dopfit1)

    toff = window / sps
    doppler0 = e["dop1"](tvec + toff)
    dmean0 = sp.mean(doppler0)
    dstd0 = sp.std(doppler0)

    doppler1 = e["dop2"](tvec + toff)
    dmean1 = sp.mean(doppler1)
    dstd1 = sp.std(doppler1)

    # cross correlation
    xcor0 = sig.correlate(dopfit0 - dfmean0, doppler0 - dmean0) / (
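The recipe here, masking out low-SNR samples and then interpolating the survivors back onto the full time vector with fill_value="extrapolate", patches holes in a measurement without changing its length. A sketch of the same idea with synthetic data:

import numpy as np
from scipy import interpolate

tvec = np.linspace(0, 10, 50)
bad = np.arange(50) % 7 == 0                     # pretend every 7th sample is corrupted
freqm = np.sin(tvec) + np.where(bad, 5.0, 0.0)
snr_db = np.where(bad, 5.0, 30.0)

keep = snr_db > 15
# Interpolate only the trusted samples, then evaluate on the full grid;
# extrapolation covers masked points at the very start or end of the record.
dopfit = interpolate.interp1d(tvec[keep], freqm[keep], fill_value="extrapolate")(tvec)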
github psychopy / psychopy / psychopy / monitors / calibTools.py
def makeDKL2RGB(nm, powerRGB):
    """Creates a 3x3 DKL->RGB conversion matrix from the spectral input powers
    """
    interpolateCones = interpolate.interp1d(wavelength_5nm,
                                            cones_SmithPokorny)
    interpolateJudd = interpolate.interp1d(wavelength_5nm,
                                           juddVosXYZ1976_5nm)
    judd = interpolateJudd(nm)
    cones = interpolateCones(nm)
    judd = np.asarray(judd)
    cones = np.asarray(cones)
    rgb_to_cones = np.dot(cones, np.transpose(powerRGB))
    # get LMS weights for Judd vl
    lumwt = np.dot(judd[1, :], np.linalg.pinv(cones))

    # cone weights for achromatic primary
    dkl_to_cones = np.dot(rgb_to_cones, [[1, 0, 0], [1, 0, 0], [1, 0, 0]])

    # cone weights for L-M primary
    dkl_to_cones[0, 1] = old_div(lumwt[1], lumwt[0])
    dkl_to_cones[1, 1] = -1
    dkl_to_cones[2, 1] = lumwt[2]
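A handy feature on display here is that interp1d accepts multi-dimensional y data and interpolates along the last axis by default, which is why a single call can resample all three Smith-Pokorny cone fundamentals (or the Judd-Vos functions) at once. A small sketch with fabricated spectral data:

import numpy as np
from scipy import interpolate

wavelength_5nm = np.arange(380, 781, 5)                              # nm, illustrative grid
cones = np.random.default_rng(1).random((3, wavelength_5nm.size))    # fake L, M, S curves

interpolateCones = interpolate.interp1d(wavelength_5nm, cones)       # axis=-1 by default
nm = np.arange(400, 701, 10)
resampled = interpolateCones(nm)
print(resampled.shape)    # (3, 31)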
github CNES / swot-hydrology-toolbox / sisimp / lib / tropo_module.py
    def calculate_tropo_error_map(self, latmin):

        my_api.printInfo("Computing map tropo_error field on %d x %d image" % (self.azmax-self.azmin, self.rmax-self.rmin))

        ds = nc.Dataset(self.tropo_error_map_file, 'r')
        delta_wtc_MEAN = ds.variables['delta_wtc_MEAN'][:]
        delta_wtc_STD = ds.variables['delta_wtc_STD'][:]
        latitude = ds.variables['latitude'][:]

        f = scipy.interpolate.interp1d(latitude, delta_wtc_MEAN, kind='linear')
        delta_wtc_MEAN_local = f(latmin)
        f = scipy.interpolate.interp1d(latitude, delta_wtc_STD, kind='linear')
        delta_wtc_STD_local = f(latmin)
               
        my_api.printInfo("%f cm mean bias and %f cm std estimated at latitude %f" % (delta_wtc_MEAN_local, delta_wtc_STD_local, latmin))

        self.tropo_map_rg_az = height_model.generate_2d_profile_gaussian(1, self.azmin, self.azmax+1, 1, self.rmin, self.rmax+1, delta_wtc_STD_local*0.01, lcorr = self.tropo_error_correlation)+delta_wtc_MEAN_local*0.01
github astropy / specutils / specutils / extinction.py
        else:
            raise ValueError('The given float {0} matches multiple available'
                             ' r_vs [3.1, 4.0, 5.5] - unexpected code error')

        fname = apydata.get_pkg_data_filename(fname)

        data = ascii.read(fname, Reader=ascii.FixedWidth, data_start=67,
                          names=['wave', 'albedo', 'avg_cos', 'C_ext',
                                 'K_abs', 'avg_cos_sq', 'comment'],
                          col_starts=[0, 12, 20, 27, 37, 47, 55],
                          col_ends=[11, 19, 26, 36, 46, 54, 80], guess=False)
        xknots = 1. / np.asarray(data['wave'])
        cknots = np.asarray(data['C_ext'])

        # Create a spline just to get normalization.
        spline = interp1d(xknots, cknots)
        cknots = cknots / spline((1. / (5495. * u.angstrom)).to('1/micron').value)  # Normalize cknots.
        self._spline = interp1d(xknots, cknots)
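Note the two-step normalisation at the end: the first interp1d is used only to evaluate the extinction curve at the reference wavenumber 1/(5495 Å), roughly 1.82 inverse microns, the knot values are divided by that number, and the interpolant is rebuilt so that it evaluates to 1 at the reference point.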