#include "petscmat.h" PetscErrorCode MatCreateShell(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx,Mat *A)Collective on MPI_Comm Many br
Input Parameters
comm - MPI communicator
m    - number of local rows (must be given)
n    - number of local columns (must be given)
M    - number of global rows (may be PETSC_DETERMINE)
N    - number of global columns (may be PETSC_DETERMINE)
ctx  - pointer to data needed by the shell matrix routines

Output Parameter
A    - the shell matrix
Usage:
extern int mult(Mat,Vec,Vec);
MatCreateShell(comm,m,n,M,N,ctx,&mat);
MatShellSetOperation(mat,MATOP_MULT,(void(*)(void))mult);
[ Use matrix for operations that have been set ]
MatDestroy(mat);
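The multiply routine registered above typically recovers the user context with MatShellGetContext() and then applies the operator to the input vector. The following is a minimal sketch only; the ShellCtx type and the scaling action are invented placeholders for whatever data and operation the application actually uses.

#include "petscmat.h"

typedef struct {
  PetscScalar scale;   /* hypothetical user data kept in the shell context */
} ShellCtx;

PetscErrorCode mult(Mat A,Vec x,Vec y)
{
  ShellCtx       *shell;
  PetscErrorCode ierr;

  ierr = MatShellGetContext(A,(void**)&shell);CHKERRQ(ierr);
  /* placeholder action: y = scale*x; a real shell matrix applies its own operator here */
  ierr = VecCopy(x,y);CHKERRQ(ierr);
  ierr = VecScale(y,shell->scale);CHKERRQ(ierr);
  return 0;
}

Once this routine has been registered with MatShellSetOperation(mat,MATOP_MULT,(void(*)(void))mult), a call to MatMult(mat,x,y) dispatches to it.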
Fortran Notes: To use this from Fortran with a ctx you must write an interface definition for this function and for MatShellGetContext() that tells Fortran the Fortran derived data type you are passing in as the ctx argument.
PETSc requires that matrices and vectors being used for certain operations are partitioned accordingly. For example, when creating a shell matrix, A, that supports parallel matrix-vector products using MatMult(A,x,y), the user should set the number of local matrix rows to be the number of local elements of the corresponding result vector, y. Note that this information is required for use of the matrix interface routines, even though the shell matrix may not actually be physically partitioned. For example,
Vec      x, y;
extern int mult(Mat,Vec,Vec);
Mat      A;
PetscInt m, n;

VecCreateMPI(comm,PETSC_DECIDE,M,&y);
VecCreateMPI(comm,PETSC_DECIDE,N,&x);
VecGetLocalSize(y,&m);
VecGetLocalSize(x,&n);
MatCreateShell(comm,m,n,M,N,ctx,&A);
MatShellSetOperation(A,MATOP_MULT,(void(*)(void))mult);
MatMult(A,x,y);
MatDestroy(A);
VecDestroy(y); VecDestroy(x);
Level: advanced
Location: src/mat/impls/shell/shell.c