How to use the fancyimpute.common.masked_mae function in fancyimpute

To help you get started, we’ve selected a few fancyimpute examples based on popular ways masked_mae is used in public projects.

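Before the project snippets below, here is a minimal, self-contained sketch of calling masked_mae directly. It assumes, based on how the function is invoked in the snippets, that masked_mae(X_true, X_pred, mask) returns the mean absolute error between X_pred and X_true over the entries where mask is True:

import numpy as np
from fancyimpute.common import masked_mae

# Toy data: one entry of X_true is missing (NaN); score only the observed ones.
X_true = np.array([[1.0, 2.0],
                   [3.0, np.nan]])
X_pred = np.array([[1.1, 1.9],
                   [2.5, 0.0]])
observed_mask = ~np.isnan(X_true)   # True where the original value is known

# Mean absolute error restricted to the observed entries.
print(masked_mae(X_true=X_true, X_pred=X_pred, mask=observed_mask))

This is the same "observed MAE" that the solvers in the snippets below report at each iteration.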

github iskandr/fancyimpute/fancyimpute/iterative_svd.py (view on GitHub)
        X = check_array(X, force_all_finite=False)

        observed_mask = ~missing_mask
        X_filled = X
        for i in range(self.max_iters):
            # deviation from original svdImpute algorithm:
            # gradually increase the rank of our approximation
            if self.gradual_rank_increase:
                curr_rank = min(2 ** i, self.rank)
            else:
                curr_rank = self.rank
            tsvd = TruncatedSVD(curr_rank, algorithm=self.svd_algorithm)
            X_reduced = tsvd.fit_transform(X_filled)
            X_reconstructed = tsvd.inverse_transform(X_reduced)
            X_reconstructed = self.clip(X_reconstructed)
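            # Mean absolute error between the SVD reconstruction and the
            # original data, computed only on the originally observed entries.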
            mae = masked_mae(
                X_true=X,
                X_pred=X_reconstructed,
                mask=observed_mask)
            if self.verbose:
                print(
                    "[IterativeSVD] Iter %d: observed MAE=%0.6f" % (
                        i + 1, mae))
            converged = self._converged(
                X_old=X_filled,
                X_new=X_reconstructed,
                missing_mask=missing_mask)
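            # Overwrite only the missing entries; observed values stay as given.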
            X_filled[missing_mask] = X_reconstructed[missing_mask]
            if converged:
                break
        return X_filled

github iskandr/fancyimpute/fancyimpute/soft_impute.py (view on GitHub)
        if self.shrinkage_value:
            shrinkage_value = self.shrinkage_value
        else:
            # totally hackish heuristic: keep only components
            # with at least 1/50th the max singular value
            shrinkage_value = max_singular_value / 50.0

        for i in range(self.max_iters):
            X_reconstruction, rank = self._svd_step(
                X_filled,
                shrinkage_value,
                max_rank=self.max_rank)
            X_reconstruction = self.clip(X_reconstruction)

            # print error on observed data
            if self.verbose:
                mae = masked_mae(
                    X_true=X_init,
                    X_pred=X_reconstruction,
                    mask=observed_mask)
                print(
                    "[SoftImpute] Iter %d: observed MAE=%0.6f rank=%d" % (
                        i + 1,
                        mae,
                        rank))

            converged = self._converged(
                X_old=X_filled,
                X_new=X_reconstruction,
                missing_mask=missing_mask)
            X_filled[missing_mask] = X_reconstruction[missing_mask]
            if converged:
                break

github iskandr/fancyimpute/fancyimpute/auto_encoder.py (view on GitHub)
        assert self.network is not None, \
            "Network should have been constructed but was found to be None"

        max_training_epochs, patience_epochs = self._get_training_params(
            n_samples)

        observed_mask = ~missing_mask

        best_error_seen = np.inf
        epochs_since_best_error = 0
        recent_predictions = deque([], maxlen=self.output_history_size)

        for epoch in range(max_training_epochs):
            X_pred = self._train_epoch(X=X, missing_mask=missing_mask)
            recent_predictions.append(X_pred)
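            # Track the autoencoder's reconstruction error on the observed
            # entries; it drives the early-stopping bookkeeping below.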
            observed_mae = masked_mae(
                X_true=X,
                X_pred=X_pred,
                mask=observed_mask)

            if epoch == 0:
                best_error_seen = observed_mae
            elif observed_mae / best_error_seen < self.min_improvement:
                best_error_seen = observed_mae
                epochs_since_best_error = 0
            else:
                epochs_since_best_error += 1

            if self.verbose:
                print("[AutoEncoder] Epoch %d/%d Observed MAE=%f %s" % (
                    epoch + 1,
                    max_training_epochs,

github iskandr/fancyimpute/experiments/complete_faces.py (view on GitHub)
    def add_entry(self, solver, name):
        print("Running %s" % name)
        completed_normalized = solver.fit_transform(self.incomplete_normalized)
        completed = self.normalizer.inverse_transform(completed_normalized)

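        # Score the completed matrix against the ground-truth images, but only
        # on the entries that were artificially masked out.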
        mae = masked_mae(
            X_true=self.flattened_images,
            X_pred=completed,
            mask=self.missing_mask)
        mse = masked_mse(
            X_true=self.flattened_images,
            X_pred=completed,
            mask=self.missing_mask)
        print("==> %s: MSE=%0.4f MAE=%0.4f" % (name, mse, mae))
        self.mse_dict[name] = mse
        self.mae_dict[name] = mae
        self.save_images(completed, base_filename=name)