How to use the petsc4py.PETSc.Scatter.toZero function in petsc4py

To help you get started, we’ve selected a few petsc4py examples, based on popular ways it is used in public projects.

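Before the project examples, here is a minimal, self-contained sketch of the basic pattern. Run it under mpiexec with several ranks; the vector size and values are illustrative:

from petsc4py import PETSc
import numpy as np

# create a distributed vector and fill each rank's slice with its global index
v = PETSc.Vec().createMPI(100)
start, end = v.getOwnershipRange()
idx = np.arange(start, end, dtype=PETSc.IntType)
v.setValues(idx, np.arange(start, end, dtype=PETSc.ScalarType))
v.assemble()

# toZero returns a scatter context plus a sequential Vec that holds all entries on rank 0
scatter, v0 = PETSc.Scatter.toZero(v)
scatter.scatter(v, v0, False, PETSc.Scatter.Mode.FORWARD)

if v.getComm().getRank() == 0:
    full = np.asarray(v0)  # the complete 100-entry array, now local to rank 0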

From pism/pism: util/fill_missing_petsc.py
def create_scatter(vector):
    "Create the scatter to processor 0."
    comm = vector.getComm()
    scatter, V0 = PETSc.Scatter.toZero(vector)
    scatter.scatter(vector, V0, False, PETSc.Scatter.Mode.FORWARD)
    comm.barrier()

    return V0, scatter
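Returning the pair lets the caller repeat the gather cheaply whenever the distributed vector changes. A short usage sketch (create_scatter and a filled PETSc Vec named vector are assumed from the context above; numpy imported as np is assumed):

V0, scatter = create_scatter(vector)
# ... modify the distributed vector ...
scatter.scatter(vector, V0, False, PETSc.Scatter.Mode.FORWARD)  # re-gather onto rank 0
if vector.getComm().getRank() == 0:
    data = np.asarray(V0)  # the full array, valid only on rank 0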
From pism/pism: site-packages/PISM/toproczero.py
def __init__(self, grid, dof, dim):  # signature inferred from the docstring below
    """:param grid: the IceGrid to be shared by all IceModelVec's
    :param dof:  the number of degrees of freedom for the IceModelVec's
    (e.g. 1 for scalar valued Vecs, 2 for horizontal velocity Vecs)
    :param dim:  the dimension of the IceModel"""
    self.grid = grid
    self.dof = dof
    self.dim = dim

    if dim != 2:
        raise NotImplementedError()

    self.da = grid.get_dm(dof, 0)

    self.tmp_U = self.da.createGlobalVector()
    self.tmp_U_natural = self.da.createNaturalVector()
    self.scatter, self.U0 = PETSc.Scatter.toZero(self.tmp_U_natural)
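Building the scatter on the natural-ordering Vec means rank 0 receives the field in grid (natural) order rather than PETSc's per-process parallel ordering. A sketch of how a gather with this class would typically proceed, assuming tmp_U already holds the distributed field (globalToNatural is petsc4py's DMDA API; its use here is an assumption about how the class is driven):

self.da.globalToNatural(self.tmp_U, self.tmp_U_natural)  # reorder into natural (grid) ordering
self.scatter.scatter(self.tmp_U_natural, self.U0, False, PETSc.Scatter.Mode.FORWARD)
# on rank 0, self.U0 now holds the entire field in natural order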
From anstmichaels/emopt: emopt/fdfd.py
elif(self._solver_type == 'direct' or self._solver_type == 'auto'):
    ksp = self.ksp_dir
    ksp.setOperators(self._A, self._A)
    ksp.setFromOptions()

ksp.solveTranspose(self.b_adj, self.x_adj)

if(NOT_PARALLEL):
    convergence = ksp.getConvergedReason()
    if(convergence < 0):
        error_message('Adjoint solution did not converge.')

# Save the full result on the master node so it can be accessed in the
# future
scatter, x_adj_full = PETSc.Scatter.toZero(self.x_adj)
scatter.scatter(self.x_adj, x_adj_full, False, PETSc.Scatter.Mode.FORWARD)

if(NOT_PARALLEL):
    fields = x_adj_full[...]
    self.fields_adj = fields

    MN = self._M*self._N

    Nc = self.Nc
    self.Ez_adj = np.reshape(fields[0::Nc], [self._M, self._N])
    self.Hx_adj = np.reshape(fields[1::Nc], [self._M, self._N])
    self.Hy_adj = np.reshape(fields[2::Nc], [self._M, self._N])
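Indexing a petsc4py Vec with an ellipsis returns its local numpy array, so on rank 0 x_adj_full[...] is the complete solution, and the strided slices above de-interleave the Nc field components. A minimal equivalent sketch (M, N, and Nc stand in for the instance attributes):

fields = x_adj_full[...]           # equivalent to x_adj_full.getArray()
# Nc components are interleaved point by point: [Ez0, Hx0, Hy0, Ez1, ...]
Ez = fields[0::Nc].reshape(M, N)   # component 0 at every grid point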
From simpeg/simpeg: GCEtools/PETScIO.py
if ...:  # condition elided in this excerpt
    outDir = _os.path.dirname(filename)
else:
    outDir = './' + _os.path.dirname(filename)

# create the output directory if it doesn't exist
try:
    _os.mkdir(outDir)
except OSError as exception:
    if exception.errno != _errno.EEXIST:
        raise

if filetype == 'txt':
    # scatter vec to process 0
    comm = vec.getComm()
    rank = comm.getRank()
    scatter, vec0 = _PETSc.Scatter.toZero(vec)
    scatter.scatter(vec, vec0, False, _PETSc.Scatter.Mode.FORWARD)

    # use process 0 to write the values to a text file
    if rank == 0:
        array0 = _np.asarray(vec0)
        with open(filename, 'w') as f:
            for i in range(len(array0)):
                f.write('{0: .12e}\n'.format(array0[i]))

    # deallocate
    comm.barrier()
    scatter.destroy()
    vec0.destroy()

elif filetype == 'bin':
    binSave = _PETSc.Viewer().createBinary(filename, 'w')
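The excerpt stops right after the binary viewer is created. No Scatter.toZero is needed on this branch, because PETSc binary viewers write a distributed Vec collectively; a sketch of how the write would typically finish (not the project's exact code):

vec.view(binSave)   # every rank participates in the collective write
binSave.destroy()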
From simpeg/simpeg: GCEtools/PETScIO.py
def vecToArray0(obj):
    """Converts a PETSc vector to a numpy array available on MPI node 0.

    Args:
        obj (petsc4py.PETSc.Vec): input vector.

    Returns:
        numpy.ndarray: the vector's entries on rank 0; None on other ranks.
    """
    # scatter vector 'obj' to process 0
    comm = obj.getComm()
    rank = comm.getRank()
    scatter, obj0 = _PETSc.Scatter.toZero(obj)
    scatter.scatter(obj, obj0, False, _PETSc.Scatter.Mode.FORWARD)

    # copy the values on rank 0 before the gathered Vec is destroyed
    result = _np.asarray(obj0).copy() if rank == 0 else None

    # deallocate on every rank; the barrier is collective, so all ranks
    # must reach it before returning
    comm.barrier()
    scatter.destroy()
    obj0.destroy()

    return result
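When every rank needs the full array rather than just rank 0, petsc4py provides the analogous PETSc.Scatter.toAll. A sketch mirroring the function above (same underscore-prefixed imports assumed):

scatter, obj_all = _PETSc.Scatter.toAll(obj)
scatter.scatter(obj, obj_all, False, _PETSc.Scatter.Mode.FORWARD)
full = _np.asarray(obj_all).copy()  # valid on every rank
scatter.destroy()
obj_all.destroy()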
From anstmichaels/emopt: emopt/fdfd.py
elif(self._solver_type == 'direct' or self._solver_type == 'auto'):
    ksp = self.ksp_dir
    ksp.setOperators(self._A, self._A)
    ksp.setFromOptions()

ksp.solve(self.b, self.x)

if(RANK == 0):
    convergence = ksp.getConvergedReason()
    if(convergence < 0):
        error_message('Forward solution did not converge with error '
                      'code %d.' % (convergence))

# Save the full result on the master node so it can be accessed in the
# future
scatter, x_full = PETSc.Scatter.toZero(self.x)
scatter.scatter(self.x, x_full, False, PETSc.Scatter.Mode.FORWARD)

if(NOT_PARALLEL):
    fields = x_full[...]
    self.fields = fields

    MN = self._M*self._N

    Nc = self.Nc
    self.Ez = np.reshape(fields[0::Nc], [self._M, self._N])
    self.Hx = np.reshape(fields[1::Nc], [self._M, self._N])
    self.Hy = np.reshape(fields[2::Nc], [self._M, self._N])

# store the source power
self._source_power = self.get_source_power()
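The same scatter context also works in reverse: after editing the gathered array on rank 0, a reverse-mode scatter pushes the values back into the distributed Vec. A hedged sketch reusing the names from the snippet above (whether emopt ever does this is not shown in the excerpt):

# distribute rank 0's (possibly modified) x_full back across all ranks
scatter.scatter(x_full, self.x, False, PETSc.Scatter.Mode.REVERSE)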