How to use the fancyimpute.SoftImpute class in fancyimpute

To help you get started, we’ve selected a few fancyimpute.SoftImpute examples, drawn from the ways it is commonly used in public projects.
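
Before the project snippets, here is a minimal, hedged sketch of the pattern they all share: mark missing entries as NaN and let SoftImpute reconstruct them. The data below is synthetic and purely illustrative; fit_transform is the current fancyimpute API, while some of the older snippets below call the equivalent complete() method instead.

import numpy as np
from fancyimpute import SoftImpute

# Synthetic illustration: a small matrix with roughly 20% of its entries missing.
rng = np.random.RandomState(0)
X = rng.randn(50, 8)
X[rng.rand(*X.shape) < 0.2] = np.nan

# SoftImpute fills the NaNs via iteratively soft-thresholded SVD (Mazumder et al., 2010).
X_filled = SoftImpute(max_iters=100, verbose=False).fit_transform(X)
assert not np.isnan(X_filled).any()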


github iskandr / fancyimpute / test / test_soft_impute.py View on Github external
def test_soft_impute_with_low_rank_random_matrix():
    solver = SoftImpute()
    XY_completed = solver.fit_transform(XY_incomplete)
    _, missing_mae = reconstruction_error(
        XY,
        XY_completed,
        missing_mask,
        name="SoftImpute")
    assert missing_mae < 0.1, "Error too high!"
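
The fixtures XY, XY_incomplete, missing_mask and the reconstruction_error helper come from the test suite's shared setup and are not shown in this excerpt; a rough, assumed stand-in could look like this:

import numpy as np

# Assumed stand-ins for the shared fixtures (names kept only for illustration).
rng = np.random.RandomState(0)
XY = rng.randn(500, 5) @ rng.randn(5, 50)        # low-rank ground-truth matrix
missing_mask = rng.rand(*XY.shape) < 0.1         # ~10% of entries marked missing
XY_incomplete = XY.copy()
XY_incomplete[missing_mask] = np.nan

def reconstruction_error(X_true, X_filled, mask, name=""):
    # Returns (overall MAE, MAE over the originally-missing entries).
    overall_mae = np.abs(X_true - X_filled).mean()
    missing_mae = np.abs(X_true[mask] - X_filled[mask]).mean()
    return overall_mae, missing_mae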
github YyzHarry / ME-Net / train_pure.py View on Github external
    [Mazumder, R. et al. Spectral regularization algorithms for learning large incomplete matrices. 2010.]

    :param img: original image
    :param maskp: observation probability of each entry in mask matrix
    :return: preprocessed image
    """
    h, w, c = img.shape
    img = img.astype('float64') * 2 / 255 - 1

    if args.me_channel == 'concat':
        img = img.transpose(2, 0, 1)
        img = np.concatenate((np.concatenate((img[0], img[1]), axis=1), img[2]), axis=1)
        mask = np.random.binomial(1, maskp, h * w * c).reshape(h, w * c).astype(float)
        mask[mask < 1] = np.nan

        W = SoftImpute(verbose=False).fit_transform(mask * img)
        W[W < -1] = -1
        W[W > 1] = 1
        est_matrix = (W + 1) * 255 / 2
        outputs = np.zeros((h, w, c))
        for channel in range(c):
            outputs[:, :, channel] = est_matrix[:, channel * w:(channel + 1) * w]
    else:
        mask = np.random.binomial(1, maskp, h * w).reshape(h, w).astype(float)
        mask[mask < 1] = np.nan

        outputs = np.zeros((h, w, c))
        for channel in range(c):
            mask_img = img[:, :, channel] * mask
            W = SoftImpute(verbose=False).fit_transform(mask_img)
            W[W < -1] = -1
            W[W > 1] = 1
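
Both branches follow the same recipe: sample a Bernoulli observation mask, mark dropped pixels as NaN, reconstruct with SoftImpute, clip to [-1, 1], and rescale back to pixel range. A hedged, standalone sketch of the per-channel branch (the excerpt is cut off before its final rescaling step):

import numpy as np
from fancyimpute import SoftImpute

def me_per_channel_sketch(img, maskp):
    # Assumed stand-in, not the original function: img has shape (h, w, c) scaled to [-1, 1].
    h, w, c = img.shape
    mask = np.random.binomial(1, maskp, h * w).reshape(h, w).astype(float)
    mask[mask < 1] = np.nan                          # unobserved pixels become NaN
    outputs = np.zeros((h, w, c))
    for channel in range(c):
        W = SoftImpute(verbose=False).fit_transform(img[:, :, channel] * mask)
        outputs[:, :, channel] = np.clip(W, -1, 1)   # clip the reconstruction to the valid range
    return (outputs + 1) * 255 / 2                   # back to [0, 255], mirroring the concat branch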
github ANTsX / ANTsPy / ants / utils / impute.py View on Github external
    _base_options = {'mean', 'median', 'constant'}
    if (method not in _base_options) and (method not in _fancyimpute_options) and (not isinstance(method, (int,float))):
        raise ValueError('method not understood.. Use `mean`, `median`, a scalar, or an option from `fancyimpute`')

    X_incomplete = data.copy()

    if method == 'KNN':
        if value is None:
            value = 3
        X_filled = KNN(k=value, verbose=False).complete(X_incomplete)

    elif method == 'BiScaler':
        X_filled = BiScaler(verbose=False).fit_transform(X_incomplete)

    elif method == 'SoftImpute':
        X_filled = SoftImpute(verbose=False).complete(X_incomplete)

    elif method == 'IterativeSVD':
        if value is None:
            rank = min(10, X_incomplete.shape[0]-2)
        else:
            rank = value
        X_filled = IterativeSVD(rank=rank, verbose=False).complete(X_incomplete)

    elif method == 'mean':
        col_means = np.nanmean(X_incomplete, axis=0)
        for i in range(X_incomplete.shape[1]):
            X_incomplete[:,i][np.isnan(X_incomplete[:,i])] = col_means[i]
        X_filled = X_incomplete

    elif method == 'median':
        col_medians = np.nanmedian(X_incomplete, axis=0)
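
This snippet (and several others on this page) uses the older complete() method; recent fancyimpute releases expose the same behaviour through the scikit-learn-style fit_transform(), so an updated SoftImpute branch would read, for example:

X_filled = SoftImpute(verbose=False).fit_transform(X_incomplete)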
github HDI-Project / ATM / recommender_experiment.py View on Github external
def suggest_classifiers(gallery_performances, probe_performances, num_suggestions=5):
    incomplete_grid = np.vstack((gallery_performances, probe_performances))

    complete_grid = SoftImpute(max_iters=5, verbose=False).complete(incomplete_grid)

    completed_probe_performances = complete_grid[-1,:]

    suggestions = np.argsort(-completed_probe_performances) # negative so in descending order

    return suggestions[:num_suggestions]
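
A brief, hypothetical usage example for the function above, with made-up performance grids (it assumes suggest_classifiers and SoftImpute are already in scope):

import numpy as np

# Four gallery datasets x six classifiers with known scores, plus one probe
# dataset whose scores are only partially observed (NaN = not yet evaluated).
gallery_performances = np.random.rand(4, 6)
probe_performances = np.array([0.70, np.nan, 0.65, np.nan, np.nan, 0.80])

print(suggest_classifiers(gallery_performances, probe_performances, num_suggestions=3))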
github morganjwilliams / pyrolite / pyrolite / util / skl.py View on Github external
        assert isinstance(X, pd.DataFrame)
        start = X
        y_present = y is not None
        groupby_present = self.groupby is not None
        self.imputer = []
        if y_present or groupby_present:
            assert not (groupby_present and y_present)
            if y_present:
                classes = np.unique(y)
                gen_mask = lambda c: y == c
            if groupby_present:
                classes = X[self.groupby].unique()
                gen_mask = lambda c: X[self.groupby] == c
            self.imputer = {
                c: {
                    "impute": SoftImpute(max_iters=self.max_iters, **self.kwargs),
                    "mask": gen_mask(c),
                }
                for c in classes
            }

            msg = """Building Soft Imputation Transformers for {} classes""".format(
                len(classes)
            )
            logger.info(msg)

        else:
            self.imputer = SoftImpute(max_iters=self.max_iters, **self.kwargs)
            msg = """Building Soft Imputation Transformer"""
            logger.info(msg)

        return self
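
The excerpt above is only the fit step: it builds one SoftImpute instance per class plus a boolean row mask for each (or a single imputer when there is no grouping). A rough sketch, assumed rather than taken from pyrolite, of how the matching transform step could apply them to an all-numeric DataFrame:

import numpy as np

def transform_sketch(self, X):
    # Hypothetical counterpart to the fit method above; assumes X is all-numeric.
    out = X.copy()
    if isinstance(self.imputer, dict):
        for parts in self.imputer.values():
            rows = np.asarray(parts["mask"])
            out.iloc[rows, :] = parts["impute"].fit_transform(X.iloc[rows, :].values)
    else:
        out.iloc[:, :] = self.imputer.fit_transform(X.values)
    return out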
github iskandr / fancyimpute / experiments / complete_faces.py View on Github external
    for fill_method in ["mean", "median"]:
        table.add_entry(
            solver=SimpleFill(fill_method=fill_method),
            name="SimpleFill_%s" % fill_method)

    for k in [1, 3, 7]:
        table.add_entry(
            solver=KNN(
                k=k,
                orientation="rows"),
            name="KNN_k%d" % (k,))

    for shrinkage_value in [25, 50, 100]:
        # SoftImpute without rank constraints
        table.add_entry(
            solver=SoftImpute(
                shrinkage_value=shrinkage_value),
            name="SoftImpute_lambda%d" % (shrinkage_value,))

    for rank in [10, 20, 40]:
        table.add_entry(
            solver=IterativeSVD(
                rank=rank,
                init_fill_method="zero"),
            name="IterativeSVD_rank%d" % (rank,))

    table.save_html_table()
    table.print_sorted_errors()
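
The table object here is the experiment's own results-table helper and is not shown; the hedged core idea is simply to run several fancyimpute solvers on the same masked data and compare their errors, for example:

import numpy as np
from fancyimpute import SimpleFill, KNN, SoftImpute, IterativeSVD

# Illustrative comparison on a synthetic low-rank matrix with missing entries.
rng = np.random.RandomState(0)
X_true = rng.randn(200, 20) @ rng.randn(20, 100)
mask = rng.rand(*X_true.shape) < 0.25
X_obs = X_true.copy()
X_obs[mask] = np.nan

solvers = {
    "SimpleFill_mean": SimpleFill(fill_method="mean"),
    "KNN_k3": KNN(k=3, orientation="rows", verbose=False),
    "SoftImpute_lambda25": SoftImpute(shrinkage_value=25, verbose=False),
    "IterativeSVD_rank10": IterativeSVD(rank=10, init_fill_method="zero", verbose=False),
}
for name, solver in solvers.items():
    X_filled = solver.fit_transform(X_obs)
    mae = np.abs(X_true[mask] - X_filled[mask]).mean()
    print(f"{name}: missing-entry MAE = {mae:.4f}")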
github YyzHarry / ME-Net / attack_blackbox.py View on Github external
    def forward(ctx, input):
        batch_num, c, h, w = input.size()
        output = torch.zeros_like(input).cpu().numpy()

        for i in range(batch_num):
            img = (input[i] * 2 - 1).cpu().numpy()

            if args.me_channel == 'concat':
                img = np.concatenate((np.concatenate((img[0], img[1]), axis=1), img[2]), axis=1)
                mask = np.random.binomial(1, args.maskp, h * w * c).reshape(h, w * c).astype(float)
                mask[mask < 1] = np.nan
                W = SoftImpute(verbose=False).fit_transform(mask * img)
                W[W < -1] = -1
                W[W > 1] = 1
                est_matrix = (W + 1) / 2
                for channel in range(c):
                    output[i, channel] = est_matrix[:, channel * h:(channel + 1) * h]
            else:
                mask = np.random.binomial(1, args.maskp, h * w).reshape(h, w).astype(float)
                mask[mask < 1] = np.nan
                for channel in range(c):
                    mask_img = img[channel] * mask
                    W = SoftImpute(verbose=False).fit_transform(mask_img)
                    W[W < -1] = -1
                    W[W > 1] = 1
                    output[i, channel] = (W + 1) / 2

        output = output - mean
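
As in train_pure.py above, each image is masked, reconstructed with SoftImpute, clipped to [-1, 1] and rescaled, only here it happens per batch element inside the forward pass of a custom autograd function; the trailing mean (and the rest of the normalization) is presumably defined elsewhere in the script and is not part of this excerpt.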
github Networks-Learning / discussion-complexity / code / soft-impute.py View on Github external
def cmd(in_mat_file, dims, suffix, i_loo, j_loo, loo_output, loo_only, verbose, seed):
    """Read M_partial from IN_MAT_FILE and complete the matrix using soft-impute method."""

    M = io.loadmat(in_mat_file)['M_partial']
    rank = dims

    LOO_mode = False
    if i_loo > -1 and j_loo > -1:
        LOO = M[i_loo, j_loo]
        M[i_loo, j_loo] = 0
        LOO_mode = True

    num_comments, num_voters = M.shape

    M[M == 0] = np.nan
    M_complete = SoftImpute(max_rank=dims).complete(M)

    if LOO_mode:
        file_tmpl = f'{in_mat_file}.r{rank}.s{seed}.i{i_loo}.j{j_loo}.soft-impute.out'

        if not loo_only:
            op_mat_file = file_tmpl + '.mat'
            io.savemat(op_mat_file, {'Mhat': M_complete})

        op_loo_file = loo_output if loo_output is not None else file_tmpl + '.loo'
        loo_pred = M_complete[i_loo, j_loo]
        with open(op_loo_file, 'wt') as f:
            f.write('{}, {}'.format(LOO, loo_pred))
    else:
        raise NotImplementedError('Use randomized_svd here.')
        # np.savetxt(in_mat_file + '.' + suffix + '.c_vecs', U)
        # np.savetxt(in_mat_file + '.' + suffix + '.v_vecs', V)
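
The leave-one-out path above boils down to: hide one observed entry, complete the matrix, and compare the prediction against the held-out value. A hedged, self-contained sketch of that idea, using fit_transform as the current equivalent of complete():

import numpy as np
from fancyimpute import SoftImpute

# Synthetic partially observed matrix; 0 marks "unobserved", as in the script above.
rng = np.random.RandomState(0)
M = np.sign(rng.randn(30, 20)) * (rng.rand(30, 20) < 0.5)

i_loo, j_loo = np.argwhere(M != 0)[0]        # pick one observed entry to hold out
held_out = M[i_loo, j_loo]

M_loo = M.astype(float)                      # copy, so M itself is untouched
M_loo[i_loo, j_loo] = 0
M_loo[M_loo == 0] = np.nan                   # SoftImpute expects NaN for missing entries
prediction = SoftImpute(max_rank=5, verbose=False).fit_transform(M_loo)[i_loo, j_loo]
print(held_out, prediction)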