Actual source code: petscmat.h

  1: /*
  2:      Include file for the matrix component of PETSc
  3: */
  4: #ifndef __PETSCMAT_H
  5: #define __PETSCMAT_H
  6:  #include "petscvec.h"

  9: /*S
 10:      Mat - Abstract PETSc matrix object

 12:    Level: beginner

 14:   Concepts: matrix; linear operator

 16: .seealso:  MatCreate(), MatType, MatSetType()
 17: S*/
 18: typedef struct _p_Mat*           Mat;

 20: /*J
 21:     MatType - String with the name of a PETSc matrix or the creation function
 22:        with an optional dynamic library name, for example
 23:        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()

 25:    Level: beginner

 27: .seealso: MatSetType(), Mat, MatSolverPackage
 28: J*/
 29: #define MatType char*
 30: #define MATSAME            "same"
 31: #define MATMAIJ            "maij"
 32: #define MATSEQMAIJ         "seqmaij"
 33: #define MATMPIMAIJ         "mpimaij"
 34: #define MATIS              "is"
 35: #define MATAIJ             "aij"
 36: #define MATSEQAIJ          "seqaij"
 37: #define MATSEQAIJPTHREAD   "seqaijpthread"
 38: #define MATAIJPTHREAD      "aijpthread"
 39: #define MATMPIAIJ          "mpiaij"
 40: #define MATAIJCRL          "aijcrl"
 41: #define MATSEQAIJCRL       "seqaijcrl"
 42: #define MATMPIAIJCRL       "mpiaijcrl"
 43: #define MATAIJCUSP         "aijcusp"
 44: #define MATSEQAIJCUSP      "seqaijcusp"
 45: #define MATMPIAIJCUSP      "mpiaijcusp"
 46: #define MATAIJPERM         "aijperm"
 47: #define MATSEQAIJPERM      "seqaijperm"
 48: #define MATMPIAIJPERM      "mpiaijperm"
 49: #define MATSHELL           "shell"
 50: #define MATDENSE           "dense"
 51: #define MATSEQDENSE        "seqdense"
 52: #define MATMPIDENSE        "mpidense"
 53: #define MATBAIJ            "baij"
 54: #define MATSEQBAIJ         "seqbaij"
 55: #define MATMPIBAIJ         "mpibaij"
 56: #define MATMPIADJ          "mpiadj"
 57: #define MATSBAIJ           "sbaij"
 58: #define MATSEQSBAIJ        "seqsbaij"
 59: #define MATMPISBAIJ        "mpisbaij"
 60: #define MATSEQBSTRM        "seqbstrm"
 61: #define MATMPIBSTRM        "mpibstrm"
 62: #define MATBSTRM           "bstrm"
 63: #define MATSEQSBSTRM       "seqsbstrm"
 64: #define MATMPISBSTRM       "mpisbstrm"
 65: #define MATSBSTRM          "sbstrm"
 66: #define MATDAAD            "daad"
 67: #define MATMFFD            "mffd"
 68: #define MATNORMAL          "normal"
 69: #define MATLRC             "lrc"
 70: #define MATSCATTER         "scatter"
 71: #define MATBLOCKMAT        "blockmat"
 72: #define MATCOMPOSITE       "composite"
 73: #define MATFFT             "fft"
 74: #define MATFFTW            "fftw"
 75: #define MATSEQCUFFT        "seqcufft"
 76: #define MATTRANSPOSEMAT    "transpose"
 77: #define MATSCHURCOMPLEMENT "schurcomplement"
 78: #define MATPYTHON          "python"
 79: #define MATHYPRESTRUCT     "hyprestruct"
 80: #define MATHYPRESSTRUCT    "hypresstruct"
 81: #define MATSUBMATRIX       "submatrix"
 82: #define MATLOCALREF        "localref"
 83: #define MATNEST            "nest"
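
/*
   A minimal creation sketch using the type names above; the local sizes m and n and the
   PetscErrorCode ierr are assumed to be provided by the calling code.

     Mat A;
     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
     ierr = MatSetType(A,MATAIJ);CHKERRQ(ierr);
     ierr = MatSetFromOptions(A);CHKERRQ(ierr);      allows -mat_type <type> to override MATAIJ

   Any of the MatType strings defined above may be passed to MatSetType().
*/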

 85: /*J
 86:     MatSolverPackage - String with the name of a PETSc matrix solver type. 

 88:     For example: "petsc" indicates what PETSc itself provides, "superlu" indicates SuperLU,
 89:        "superlu_dist" indicates SuperLU_DIST, etc.


 92:    Level: beginner

 94: .seealso: MatGetFactor(), Mat, MatSetType(), MatType
 95: J*/
 96: #define MatSolverPackage char*
 97: #define MATSOLVERSPOOLES      "spooles"
 98: #define MATSOLVERSUPERLU      "superlu"
 99: #define MATSOLVERSUPERLU_DIST "superlu_dist"
100: #define MATSOLVERUMFPACK      "umfpack"
101: #define MATSOLVERCHOLMOD      "cholmod"
102: #define MATSOLVERESSL         "essl"
103: #define MATSOLVERLUSOL        "lusol"
104: #define MATSOLVERMUMPS        "mumps"
105: #define MATSOLVERPASTIX       "pastix"
106: #define MATSOLVERMATLAB       "matlab"
107: #define MATSOLVERPETSC        "petsc"
108: #define MATSOLVERPLAPACK      "plapack"
109: #define MATSOLVERBAS          "bas"

111: #define MATSOLVERBSTRM        "bstrm"
112: #define MATSOLVERSBSTRM       "sbstrm"

114: /*E
115:     MatFactorType - indicates what type of factorization is requested

117:     Level: beginner

119:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

121: .seealso: MatSolverPackage, MatGetFactor()
122: E*/
123: typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
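
/*
   A sketch of requesting a factorization directly with MatGetFactor(); normally this is done
   through the KSP/PC interface instead.  The matrix A and the vectors b,x are assumed to exist.

     Mat           F;
     IS            rperm,cperm;
     MatFactorInfo info;
     ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatGetOrdering(A,MATORDERINGND,&rperm,&cperm);CHKERRQ(ierr);
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
*/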


131: /* Logging support */
132: #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */

139: /*E
140:     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
 141:      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
142:      that the input matrix is to be replaced with the converted matrix.

144:     Level: beginner

146:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

148: .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
149: E*/
150: typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
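
/*
   A sketch of the MAT_INITIAL_MATRIX/MAT_REUSE_MATRIX pattern; the index sets isrow,iscol and
   the matrix A are assumed to exist.  The second call reuses the storage created by the first.

     Mat sub;
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&sub);CHKERRQ(ierr);
       ... A changes numerically but keeps the same nonzero pattern ...
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&sub);CHKERRQ(ierr);
*/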

152: /*E
153:     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
 154:      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().

156:     Level: beginner

 158: .seealso: MatGetSeqNonzeroStructure()
159: E*/
160: typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;


165: PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
166: PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)

178: /*MC
179:    MatRegisterDynamic - Adds a new matrix type

181:    Synopsis:
182:    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))

184:    Not Collective

186:    Input Parameters:
187: +  name - name of a new user-defined matrix type
 188: .  path - path (either absolute or relative) of the library containing this solver
189: .  name_create - name of routine to create method context
190: -  routine_create - routine to create method context

192:    Notes:
193:    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.

195:    If dynamic libraries are used, then the fourth input argument (routine_create)
196:    is ignored.

198:    Sample usage:
199: .vb
200:    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
201:                "MyMatCreate",MyMatCreate);
202: .ve

204:    Then, your solver can be chosen with the procedural interface via
205: $     MatSetType(Mat,"my_mat")
206:    or at runtime via the option
207: $     -mat_type my_mat

209:    Level: advanced

 211:    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
 212:          If your function is not being put into a shared library then use MatRegister() instead

214: .keywords: Mat, register

216: .seealso: MatRegisterAll(), MatRegisterDestroy()

218: M*/
219: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
220: #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
221: #else
222: #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
223: #endif


230: /*E
231:     MatStructure - Indicates if the matrix has the same nonzero structure

233:     Level: beginner

235:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

237: .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
238: E*/
239: typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
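
/*
   The flag is most often passed to the solver interface; a sketch, assuming petscksp.h is
   included and a KSP object ksp already exists:

     ierr = KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

   Use DIFFERENT_NONZERO_PATTERN whenever the sparsity of the matrix changes between solves.
*/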

244: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
245: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
246: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
247: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
248: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
249: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
250: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
252: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
253: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
254: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
255: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
256: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
257: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
258: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
259: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
260: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
261: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
262: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
263: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
264: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
265: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

270: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
271: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
272: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
273: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
274: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
275: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
276: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
278: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
279: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
280: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
281: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
282: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
283: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
284: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
285: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
286: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
287: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
288: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
289: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
290: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
291: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

296: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
297: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
298: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
299: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
300: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
301: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
302: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))

305: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
306: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
307: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
308: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
309: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
310: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
311: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
312: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
313: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
314: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
315: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
316: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
317: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
318: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

323: PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
324: PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327: PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)


345: typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;






368: /* ------------------------------------------------------------*/

375: /*S
376:      MatStencil - Data structure (C struct) for storing information about a single row or
 377:         column of a matrix as an index on an associated grid.

379:    Level: beginner

381:   Concepts: matrix; linear operator

383: .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
384: S*/
385: typedef struct {
386:   PetscInt k,j,i,c;
387: } MatStencil;
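
/*
   A sketch of setting one stencil row with MatSetValuesStencil(); the matrix A is assumed to
   have been obtained from a DMDA (so the stencil-to-index mapping is available) and i,j,k are
   assumed grid indices.

     MatStencil  row,col[2];
     PetscScalar v[2];
     row.i = i;   row.j = j;   row.k = k;   row.c = 0;
     col[0] = row;
     col[1].i = i+1; col[1].j = j; col[1].k = k; col[1].c = 0;
     v[0] = 2.0; v[1] = -1.0;
     ierr = MatSetValuesStencil(A,1,&row,2,col,v,INSERT_VALUES);CHKERRQ(ierr);
*/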



397: /*E
398:     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan 
399:      to continue to add values to it

401:     Level: beginner

403: .seealso: MatAssemblyBegin(), MatAssemblyEnd()
404: E*/
405: typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
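
/*
   MAT_FLUSH_ASSEMBLY is needed only when switching between INSERT_VALUES and ADD_VALUES before
   the final assembly; a sketch, with A, row, col assumed to exist:

     ierr = MatSetValue(A,row,col,1.0,INSERT_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatSetValue(A,row,col,1.0,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/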



412: /*E
413:     MatOption - Options that may be set for a matrix and its behavior or storage

415:     Level: beginner

417:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

419: .seealso: MatSetOption()
420: E*/
421: typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
422:               MAT_SYMMETRIC,
423:               MAT_STRUCTURALLY_SYMMETRIC,
424:               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
425:               MAT_NEW_NONZERO_LOCATION_ERR,
426:               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
427:               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
428:               MAT_USE_INODES,
429:               MAT_HERMITIAN,
430:               MAT_SYMMETRY_ETERNAL,
431:               MAT_CHECK_COMPRESSED_ROW,
432:               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
433:               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
434:               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
435:               NUM_MAT_OPTIONS} MatOption;
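
/*
   Options are set with MatSetOption() once the matrix type has been set; a sketch:

     ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
*/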
439: PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)

450: PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
453: PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)


460: PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
464: PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
465: PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
469: PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))

474: /*E
475:     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
476:   its numerical values copied over or just its nonzero structure.

478:     Level: beginner

480:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

 482: $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix;
483: $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
484: $                               have several matrices with the same nonzero pattern.

486: .seealso: MatDuplicate()
487: E*/
488: typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
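
/*
   A sketch of the three duplication modes, for an assembled matrix A:

     Mat B,C,D;
     ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);             structure and values
     ierr = MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,&C);CHKERRQ(ierr);      structure only, values zeroed
     ierr = MatDuplicate(A,MAT_SHARE_NONZERO_PATTERN,&D);CHKERRQ(ierr);   i,j arrays shared with A
*/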

491: PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
493: PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
494: PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


500: PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
501: PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
503: PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
505: PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)


516: /*S
517:      MatInfo - Context of matrix information, used with MatGetInfo()

519:    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

521:    Level: intermediate

523:   Concepts: matrix^nonzero information

525: .seealso:  MatGetInfo(), MatInfoType
526: S*/
527: typedef struct {
528:   PetscLogDouble block_size;                         /* block size */
529:   PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
530:   PetscLogDouble memory;                             /* memory allocated */
531:   PetscLogDouble assemblies;                         /* number of matrix assemblies called */
532:   PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
533:   PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
534:   PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
535: } MatInfo;

537: /*E
538:     MatInfoType - Indicates if you want information about the local part of the matrix,
539:      the entire parallel matrix or the maximum over all the local parts.

541:     Level: beginner

543:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

545: .seealso: MatGetInfo(), MatInfo
546: E*/
547: typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
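
/*
   A sketch of querying assembly statistics with MatGetInfo(); all MatInfo fields are
   PetscLogDouble, so they print with %g.

     MatInfo info;
     ierr = MatGetInfo(A,MAT_LOCAL,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_SELF,"nonzeros used %g, mallocs %g\n",info.nz_used,info.mallocs);CHKERRQ(ierr);
*/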
556: PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
559: PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
563: PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)

570: PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)




607: #if defined (PETSC_USE_CTABLE)
 608:  #include "petscctable.h"
610: #else
612: #endif









651: PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))

658: /*MC
659:    MatSetValue - Set a single entry into a matrix.

661:    Not collective

663:    Input Parameters:
664: +  m - the matrix
665: .  row - the row location of the entry
666: .  col - the column location of the entry
667: .  value - the value to insert
668: -  mode - either INSERT_VALUES or ADD_VALUES

670:    Notes: 
671:    For efficiency one should use MatSetValues() and set several or many
672:    values simultaneously if possible.

674:    Level: beginner

676: .seealso: MatSetValues(), MatSetValueLocal()
677: M*/
678: PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}

680: PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

682: PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
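
/*
   As the note above says, inserting a block of entries with MatSetValues() is preferred over
   repeated MatSetValue() calls; a sketch inserting a row-oriented 2x2 block into rows/columns 0,1:

     PetscInt    idx[2]  = {0,1};
     PetscScalar vals[4] = {4.0,-1.0,-1.0,4.0};
     ierr = MatSetValues(A,2,idx,2,idx,vals,INSERT_VALUES);CHKERRQ(ierr);
*/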

684: /*MC
685:    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
686:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

688:    Synopsis:
689:    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

691:    Collective on MPI_Comm

693:    Input Parameters:
694: +  comm - the communicator that will share the eventually allocated matrix
695: .  nrows - the number of LOCAL rows in the matrix
696: -  ncols - the number of LOCAL columns in the matrix

698:    Output Parameters:
699: +  dnz - the array that will be passed to the matrix preallocation routines
 700: -  onz - the other array passed to the matrix preallocation routines


703:    Level: intermediate

705:    Notes:
 706:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 708:    Do not malloc or free dnz and onz; that is handled internally by these routines

710:    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)

 712:    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

714:   Concepts: preallocation^Matrix

716: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
717:           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
718: M*/
719: #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
720: { \
721:   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
 722:   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
 723:   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 724:   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 725:   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
 726:   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

728: /*MC
729:    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
730:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

732:    Synopsis:
733:    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

735:    Collective on MPI_Comm

737:    Input Parameters:
738: +  comm - the communicator that will share the eventually allocated matrix
739: .  nrows - the number of LOCAL rows in the matrix
740: -  ncols - the number of LOCAL columns in the matrix

742:    Output Parameters:
743: +  dnz - the array that will be passed to the matrix preallocation routines
 744: -  onz - the other array passed to the matrix preallocation routines


747:    Level: intermediate

749:    Notes:
 750:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 752:    Do not malloc or free dnz and onz; that is handled internally by these routines

 754:    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

756:   Concepts: preallocation^Matrix

758: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
759:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
760: M*/
761: #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
762: { \
763:   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
 764:   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
 765:   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 766:   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 767:   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
 768:   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

770: /*MC
771:    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 772:        inserted using a local numbering of the rows and columns

774:    Synopsis:
 775:    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

777:    Not Collective

779:    Input Parameters:
 780: +  rmap - the row mapping from local numbering to global numbering
 781: .  nrows - the number of rows indicated
 782: .  rows - the indices of the rows
 783: .  cmap - the column mapping from local to global numbering
 784: .  ncols - the number of columns in the matrix
 785: .  cols - the columns indicated
 786: .  dnz - the array that will be passed to the matrix preallocation routines
 787: -  onz - the other array passed to the matrix preallocation routines


790:    Level: intermediate

792:    Notes:
 793:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 795:    Do not malloc or free dnz and onz; that is handled internally by these routines

797:   Concepts: preallocation^Matrix

799: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
800:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
801: M*/
802: #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
803: {\
804:   PetscInt __l;\
 805:   _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
 806:   _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
 807:   for (__l=0;__l<nrows;__l++) {\
 808:     _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
809:   }\
810: }
811: 
812: /*MC
813:    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 814:        inserted using a local numbering of the rows and columns

816:    Synopsis:
 817:    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows,PetscInt *rows,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

819:    Not Collective

821:    Input Parameters:
822: +  map - the mapping between local numbering and global numbering
823: .  nrows - the number of rows indicated
824: .  rows - the indices of the rows 
825: .  ncols - the number of columns in the matrix
826: .  cols - the columns indicated
827: .  dnz - the array that will be passed to the matrix preallocation routines
 828: -  onz - the other array passed to the matrix preallocation routines


831:    Level: intermediate

833:    Notes:
 834:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 836:    Do not malloc or free dnz and onz; that is handled internally by these routines

838:   Concepts: preallocation^Matrix

840: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
841:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
842: M*/
843: #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
844: {\
845:   PetscInt __l;\
 846:   _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
 847:   _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
 848:   for (__l=0;__l<nrows;__l++) {\
 849:     _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
850:   }\
851: }

853: /*MC
854:    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 855:        inserted using a global numbering of the rows and columns

857:    Synopsis:
 858:    PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

860:    Not Collective

862:    Input Parameters:
863: +  row - the row
864: .  ncols - the number of columns in the matrix
865: -  cols - the columns indicated

867:    Output Parameters:
868: +  dnz - the array that will be passed to the matrix preallocation routines
 869: -  onz - the other array passed to the matrix preallocation routines


872:    Level: intermediate

874:    Notes:
 875:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 877:    Do not malloc or free dnz and onz; that is handled internally by these routines

879:    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

881:   Concepts: preallocation^Matrix

883: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
884:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
885: M*/
886: #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
887: { PetscInt __i; \
888:   if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
889:   if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
890:   for (__i=0; __i<nc; __i++) {\
891:     if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
892:     else dnz[row - __rstart]++;\
893:   }\
894: }

896: /*MC
897:    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 898:        inserted using a global numbering of the rows and columns

900:    Synopsis:
 901:    PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

903:    Not Collective

905:    Input Parameters:
 906: +  row - the row
 907: .  ncols - the number of columns in the matrix
 908: .  cols - the columns indicated
 909: .  dnz - the array that will be passed to the matrix preallocation routines
 910: -  onz - the other array passed to the matrix preallocation routines


914:    Level: intermediate

916:    Notes:
 917:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 919:    Do not malloc or free dnz and onz; that is handled internally by these routines

921:    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

923:   Concepts: preallocation^Matrix

925: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
926:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
927: M*/
928: #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
929: { PetscInt __i; \
930:   for (__i=0; __i<nc; __i++) {\
931:     if (cols[__i] >= __end) onz[row - __rstart]++; \
932:     else if (cols[__i] >= row) dnz[row - __rstart]++;\
933:   }\
934: }

936: /*MC
 937:    MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

939:    Synopsis:
 940:    PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

942:    Not Collective

944:    Input Parameters:
 945: +  A - matrix
946: .  row - row where values exist (must be local to this process)
947: .  ncols - number of columns
948: .  cols - columns with nonzeros
949: .  dnz - the array that will be passed to the matrix preallocation routines
 950: -  onz - the other array passed to the matrix preallocation routines


953:    Level: intermediate

955:    Notes:
 956:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 958:    Do not malloc or free dnz and onz; that is handled internally by these routines

960:    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.

962:   Concepts: preallocation^Matrix

964: .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
965:           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
966: M*/
967: #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);} else { MatPreallocateSet(row,ncols,cols,dnz,onz);}


970: /*MC
971:    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
972:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

974:    Synopsis:
975:    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

977:    Collective on MPI_Comm

979:    Input Parameters:
 980: +  dnz - the array that was passed to the matrix preallocation routines
 981: -  onz - the other array passed to the matrix preallocation routines


984:    Level: intermediate

986:    Notes:
 987:    See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 989:    Do not malloc or free dnz and onz; that is handled internally by these routines

991:    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().

993:   Concepts: preallocation^Matrix

995: .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
996:           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
997: M*/
 998: #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
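
/*
   The macros above are used together as one block; a sketch for an MPIAIJ matrix whose local
   row/column sizes nlocalrows,nlocalcols and per-row column lists ncols,cols are assumed to be
   computed by the calling code (rstart,rend are the global rows owned by this process):

     PetscInt *dnz,*onz,i;
     ierr = MatPreallocateInitialize(PETSC_COMM_WORLD,nlocalrows,nlocalcols,dnz,onz);CHKERRQ(ierr);
     for (i=rstart; i<rend; i++) {
       ierr = MatPreallocateSet(i,ncols,cols,dnz,onz);CHKERRQ(ierr);
     }
     ierr = MatMPIAIJSetPreallocation(A,0,dnz,0,onz);CHKERRQ(ierr);
     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
*/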



1002: /* Routines unique to particular data structures */
1004: PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)



1015: #define MAT_SKIP_ALLOCATION -4

1018: PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1020: PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1022: PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

1025: PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))




1048: /* 
1049:   These routines are not usually accessed directly, rather solving is 
1050:   done through the KSP and PC interfaces.
1051: */

1053: /*J
1054:     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1055:        with an optional dynamic library name, for example 
1056:        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

1058:    Level: beginner

1060:    Cannot use const because the PC objects manipulate the string

1062: .seealso: MatGetOrdering()
1063: J*/
1064: #define MatOrderingType char*
1065: #define MATORDERINGNATURAL     "natural"
1066: #define MATORDERINGND          "nd"
1067: #define MATORDERING1WD         "1wd"
1068: #define MATORDERINGRCM         "rcm"
1069: #define MATORDERINGQMD         "qmd"
1070: #define MATORDERINGROWLENGTH   "rowlength"
1071: #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
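
/*
   A sketch of computing an ordering and applying it explicitly; more commonly the ordering name
   is simply passed to the factorization preconditioner with -pc_factor_mat_ordering_type.

     IS  rperm,cperm;
     Mat Aperm;
     ierr = MatGetOrdering(A,MATORDERINGRCM,&rperm,&cperm);CHKERRQ(ierr);
     ierr = MatPermute(A,rperm,cperm,&Aperm);CHKERRQ(ierr);
*/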


1077: /*MC
1078:    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 

1080:    Synopsis:
1081:    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

1083:    Not Collective

1085:    Input Parameters:
1086: +  sname - name of ordering (for example MATORDERINGND)
1087: .  path - location of library where creation routine is 
1088: .  name - name of function that creates the ordering type, a string
1089: -  function - function pointer that creates the ordering

1091:    Level: developer

1093:    If dynamic libraries are used, then the fourth input argument (function)
1094:    is ignored.

1096:    Sample usage:
1097: .vb
1098:    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1099:                "MyOrder",MyOrder);
1100: .ve

1102:    Then, your ordering can be chosen with the procedural interface via
1103: $     MatOrderingSetType(part,"my_order")
1104:    or at runtime via the option
1105: $     -pc_factor_mat_ordering_type my_order

1107:    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

1109: .keywords: matrix, ordering, register

1111: .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1112: M*/
1113: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1114: #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
1115: #else
1116: #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1117: #endif



1126: /*S
1127:     MatFactorShiftType - Type of numeric shift used during factorization to prevent zero pivots.

1129:    Level: beginner

1131: S*/
1132: typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;

1135: /*S 
1136:    MatFactorInfo - Data passed into the matrix factorization routines

1138:    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1139: $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

1141:    Notes: These are not usually used directly by users; instead use the PC types LU, ILU, CHOLESKY or ICC.

1143:       You can use MatFactorInfoInitialize() to set default values.

1145:    Level: developer

1147: .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 
1148:           MatFactorInfoInitialize()

1150: S*/
1151: typedef struct {
1152:   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1153:   PetscReal     usedt;
1154:   PetscReal     dt;             /* drop tolerance */
1155:   PetscReal     dtcol;          /* tolerance for pivoting */
1156:   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1157:   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1158:   PetscReal     levels;         /* ICC/ILU(levels) */
1159:   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0;
1160:                                    factorization may be faster if pivoting is not done */
1161:   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1162:   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
1163:   PetscReal     shiftamount;     /* how large the shift is */
1164: } MatFactorInfo;
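
/*
   A sketch of filling a MatFactorInfo for a Cholesky factorization; F is assumed to come from
   MatGetFactor(A,...,MAT_FACTOR_CHOLESKY,&F) and perm from MatGetOrdering().

     MatFactorInfo info;
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     info.fill      = 5.0;                                     expect 5x fill in the factor
     info.shifttype = (PetscReal)MAT_SHIFT_POSITIVE_DEFINITE;
     ierr = MatCholeskyFactorSymbolic(F,A,perm,&info);CHKERRQ(ierr);
     ierr = MatCholeskyFactorNumeric(F,A,&info);CHKERRQ(ierr);
*/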



1188: /*E
1189:     MatSORType - What type of (S)SOR to perform

1191:     Level: beginner

1193:    May be bitwise ORd together

1195:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

1197:    MatSORType may be bitwise ORd together, so do not change the numbers 

1199: .seealso: MatSOR()
1200: E*/
1201: typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1202:               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1203:               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1204:               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
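
/*
   A sketch of one forward SOR sweep with zero initial guess applied directly through MatSOR();
   A, b and x are assumed to exist, and omega=1.0, shift=0.0, its=lits=1 are typical values.

     ierr = MatSOR(A,b,1.0,(MatSORType)(SOR_FORWARD_SWEEP | SOR_ZERO_INITIAL_GUESS),0.0,1,1,x);CHKERRQ(ierr);
*/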

1207: /* 
1208:     These routines are for efficiently computing Jacobians via finite differences.
1209: */

1211: /*J
1212:     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1213:        with an optional dynamic library name, for example 
1214:        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

1216:    Level: beginner

1218: .seealso: MatGetColoring()
1219: J*/
1220: #define MatColoringType char*
1221: #define MATCOLORINGNATURAL "natural"
1222: #define MATCOLORINGSL      "sl"
1223: #define MATCOLORINGLF      "lf"
1224: #define MATCOLORINGID      "id"


1229: /*MC
1230:    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 
1231:                                matrix package. 

1233:    Synopsis:
1234:    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))

1236:    Not Collective

1238:    Input Parameters:
1239: +  sname - name of Coloring (for example MATCOLORINGSL)
1240: .  path - location of library where creation routine is 
1241: .  name - name of function that creates the Coloring type, a string
1242: -  function - function pointer that creates the coloring

1244:    Level: developer

1246:    If dynamic libraries are used, then the fourth input argument (function)
1247:    is ignored.

1249:    Sample usage:
1250: .vb
1251:    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1252:                "MyColor",MyColor);
1253: .ve

1255:    Then, your coloring can be chosen with the procedural interface via
1256: $     MatColoringSetType(part,"my_color")
1257:    or at runtime via the option
1258: $     -mat_coloring_type my_color

1260:    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

1262: .keywords: matrix, Coloring, register

1264: .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1265: M*/
1266: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1267: #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1268: #else
1269: #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1270: #endif



1278: /*S
1279:      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1280:         and coloring

1282:    Level: beginner

1284:   Concepts: coloring, sparse Jacobian, finite differences

1286: .seealso:  MatFDColoringCreate()
1287: S*/
1288: typedef struct _p_MatFDColoring* MatFDColoring;
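
/*
   A sketch of building a MatFDColoring from a coloring of the Jacobian nonzero structure J;
   setting the function to difference (e.g. with MatFDColoringSetFunction()) is omitted here.

     ISColoring    iscoloring;
     MatFDColoring fdcoloring;
     ierr = MatGetColoring(J,MATCOLORINGSL,&iscoloring);CHKERRQ(ierr);
     ierr = MatFDColoringCreate(J,iscoloring,&fdcoloring);CHKERRQ(ierr);
     ierr = MatFDColoringSetFromOptions(fdcoloring);CHKERRQ(ierr);
     ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
*/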

1301: /* 
1302:     These routines are for partitioning matrices: currently used only 
1303:   for adjacency matrix, MatCreateMPIAdj().
1304: */

1306: /*S
1307:      MatPartitioning - Object for managing the partitioning of a matrix or graph

1309:    Level: beginner

1311:   Concepts: partitioning

1313: .seealso:  MatPartitioningCreate(), MatPartitioningType
1314: S*/
1315: typedef struct _p_MatPartitioning* MatPartitioning;

1317: /*J
1318:     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1319:        with an optional dynamic library name, for example 
1320:        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

1322:    Level: beginner

1324: .seealso: MatPartitioningCreate(), MatPartitioning
1325: J*/
1326: #define MatPartitioningType char*
1327: #define MATPARTITIONINGCURRENT  "current"
1328: #define MATPARTITIONINGSQUARE   "square"
1329: #define MATPARTITIONINGPARMETIS "parmetis"
1330: #define MATPARTITIONINGCHACO    "chaco"
1331: #define MATPARTITIONINGPARTY    "party"
1332: #define MATPARTITIONINGPTSCOTCH "ptscotch"
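
/*
   A sketch of partitioning; Adj is assumed to be an adjacency matrix, typically created with
   MatCreateMPIAdj().  The resulting index set gives, for each local row, the process it is
   assigned to.

     MatPartitioning part;
     IS              is;
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,Adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
*/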




1346: /*MC
1347:    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 
1348:    matrix package. 

1350:    Synopsis:
1351:    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))

1353:    Not Collective

1355:    Input Parameters:
1356: +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1357: .  path - location of library where creation routine is 
1358: .  name - name of function that creates the partitioning type, a string
1359: -  function - function pointer that creates the partitioning type

1361:    Level: developer

1363:    If dynamic libraries are used, then the fourth input argument (function)
1364:    is ignored.

1366:    Sample usage:
1367: .vb
1368:    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1369:                "MyPartCreate",MyPartCreate);
1370: .ve

1372:    Then, your partitioner can be chosen with the procedural interface via
1373: $     MatPartitioningSetType(part,"my_part")
1374:    or at runtime via the option
1375: $     -mat_partitioning_type my_part

1377:    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

1379: .keywords: matrix, partitioning, register

1381: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1382: M*/
1383: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1384: #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1385: #else
1386: #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1387: #endif





1401: typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
1403: typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
1405: typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;


1420: #define MP_PARTY_OPT "opt"
1421: #define MP_PARTY_LIN "lin"
1422: #define MP_PARTY_SCA "sca"
1423: #define MP_PARTY_RAN "ran"
1424: #define MP_PARTY_GBF "gbf"
1425: #define MP_PARTY_GCF "gcf"
1426: #define MP_PARTY_BUB "bub"
1427: #define MP_PARTY_DEF "def"
1429: #define MP_PARTY_HELPFUL_SETS "hs"
1430: #define MP_PARTY_KERNIGHAN_LIN "kl"
1431: #define MP_PARTY_NONE "no"

1437: typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;



1448: /*
1449:     If you add entries here you must also add them to finclude/petscmat.h
1450: */
1451: typedef enum { MATOP_SET_VALUES=0,
1452:                MATOP_GET_ROW=1,
1453:                MATOP_RESTORE_ROW=2,
1454:                MATOP_MULT=3,
1455:                MATOP_MULT_ADD=4,
1456:                MATOP_MULT_TRANSPOSE=5,
1457:                MATOP_MULT_TRANSPOSE_ADD=6,
1458:                MATOP_SOLVE=7,
1459:                MATOP_SOLVE_ADD=8,
1460:                MATOP_SOLVE_TRANSPOSE=9,
1461:                MATOP_SOLVE_TRANSPOSE_ADD=10,
1462:                MATOP_LUFACTOR=11,
1463:                MATOP_CHOLESKYFACTOR=12,
1464:                MATOP_SOR=13,
1465:                MATOP_TRANSPOSE=14,
1466:                MATOP_GETINFO=15,
1467:                MATOP_EQUAL=16,
1468:                MATOP_GET_DIAGONAL=17,
1469:                MATOP_DIAGONAL_SCALE=18,
1470:                MATOP_NORM=19,
1471:                MATOP_ASSEMBLY_BEGIN=20,
1472:                MATOP_ASSEMBLY_END=21,
1473:                MATOP_SET_OPTION=22,
1474:                MATOP_ZERO_ENTRIES=23,
1475:                MATOP_ZERO_ROWS=24,
1476:                MATOP_LUFACTOR_SYMBOLIC=25,
1477:                MATOP_LUFACTOR_NUMERIC=26,
1478:                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1479:                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1480:                MATOP_SETUP_PREALLOCATION=29,
1481:                MATOP_ILUFACTOR_SYMBOLIC=30,
1482:                MATOP_ICCFACTOR_SYMBOLIC=31,
1483:                MATOP_GET_ARRAY=32,
1484:                MATOP_RESTORE_ARRAY=33,
1485:                MATOP_DUPLICATE=34,
1486:                MATOP_FORWARD_SOLVE=35,
1487:                MATOP_BACKWARD_SOLVE=36,
1488:                MATOP_ILUFACTOR=37,
1489:                MATOP_ICCFACTOR=38,
1490:                MATOP_AXPY=39,
1491:                MATOP_GET_SUBMATRICES=40,
1492:                MATOP_INCREASE_OVERLAP=41,
1493:                MATOP_GET_VALUES=42,
1494:                MATOP_COPY=43,
1495:                MATOP_GET_ROW_MAX=44,
1496:                MATOP_SCALE=45,
1497:                MATOP_SHIFT=46,
1498:                MATOP_DIAGONAL_SET=47,
1499:                MATOP_ILUDT_FACTOR=48,
1500:                MATOP_SET_BLOCK_SIZE=49,
1501:                MATOP_GET_ROW_IJ=50,
1502:                MATOP_RESTORE_ROW_IJ=51,
1503:                MATOP_GET_COLUMN_IJ=52,
1504:                MATOP_RESTORE_COLUMN_IJ=53,
1505:                MATOP_FDCOLORING_CREATE=54,
1506:                MATOP_COLORING_PATCH=55,
1507:                MATOP_SET_UNFACTORED=56,
1508:                MATOP_PERMUTE=57,
1509:                MATOP_SET_VALUES_BLOCKED=58,
1510:                MATOP_GET_SUBMATRIX=59,
1511:                MATOP_DESTROY=60,
1512:                MATOP_VIEW=61,
1513:                MATOP_CONVERT_FROM=62,
1514:                MATOP_USE_SCALED_FORM=63,
1515:                MATOP_SCALE_SYSTEM=64,
1516:                MATOP_UNSCALE_SYSTEM=65,
1517:                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1518:                MATOP_SET_VALUES_LOCAL=67,
1519:                MATOP_ZERO_ROWS_LOCAL=68,
1520:                MATOP_GET_ROW_MAX_ABS=69,
1521:                MATOP_GET_ROW_MIN_ABS=70,
1522:                MATOP_CONVERT=71,
1523:                MATOP_SET_COLORING=72,
1524:                MATOP_SET_VALUES_ADIC=73,
1525:                MATOP_SET_VALUES_ADIFOR=74,
1526:                MATOP_FD_COLORING_APPLY=75,
1527:                MATOP_SET_FROM_OPTIONS=76,
1528:                MATOP_MULT_CON=77,
1529:                MATOP_MULT_TRANSPOSE_CON=78,
1530:                MATOP_PERMUTE_SPARSIFY=79,
1531:                MATOP_MULT_MULTIPLE=80,
1532:                MATOP_SOLVE_MULTIPLE=81,
1533:                MATOP_GET_INERTIA=82,
1534:                MATOP_LOAD=83,
1535:                MATOP_IS_SYMMETRIC=84,
1536:                MATOP_IS_HERMITIAN=85,
1537:                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1538:                MATOP_DUMMY=87,
1539:                MATOP_GET_VECS=88,
1540:                MATOP_MAT_MULT=89,
1541:                MATOP_MAT_MULT_SYMBOLIC=90,
1542:                MATOP_MAT_MULT_NUMERIC=91,
1543:                MATOP_PTAP=92,
1544:                MATOP_PTAP_SYMBOLIC=93,
1545:                MATOP_PTAP_NUMERIC=94,
1546:                MATOP_MAT_MULTTRANSPOSE=95,
1547:                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1548:                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1549:                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1550:                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1551:                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1552:                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1553:                MATOP_CONJUGATE=102,
1554:                MATOP_SET_SIZES=103,
1555:                MATOP_SET_VALUES_ROW=104,
1556:                MATOP_REAL_PART=105,
1557:                MATOP_IMAG_PART=106,
1558:                MATOP_GET_ROW_UTRIANGULAR=107,
1559:                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1560:                MATOP_MATSOLVE=109,
1561:                MATOP_GET_REDUNDANTMATRIX=110,
1562:                MATOP_GET_ROW_MIN=111,
1563:                MATOP_GET_COLUMN_VEC=112,
1564:                MATOP_MISSING_DIAGONAL=113,
1565:                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1566:                MATOP_CREATE=115,
1567:                MATOP_GET_GHOSTS=116,
1568:                MATOP_GET_LOCALSUBMATRIX=117,
1569:                MATOP_RESTORE_LOCALSUBMATRIX=118,
1570:                MATOP_MULT_DIAGONAL_BLOCK=119,
1571:                MATOP_HERMITIANTRANSPOSE=120,
1572:                MATOP_MULTHERMITIANTRANSPOSE=121,
1573:                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1574:                MATOP_GETMULTIPROCBLOCK=123,
1575:                MATOP_GETCOLUMNNORMS=125,
1576:                MATOP_GET_SUBMATRICES_PARALLEL=128,
1577:                MATOP_SET_VALUES_BATCH=129
1578:              } MatOperation;
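
/*
   The MATOP_ values are mainly used with MatShellSetOperation() to supply individual operations
   for a MATSHELL; MyMult, ctx and the sizes m,n,M,N below are user-provided (hypothetical) names.

     extern PetscErrorCode MyMult(Mat,Vec,Vec);
     Mat S;
     ierr = MatCreateShell(PETSC_COMM_WORLD,m,n,M,N,(void*)ctx,&S);CHKERRQ(ierr);
     ierr = MatShellSetOperation(S,MATOP_MULT,(void(*)(void))MyMult);CHKERRQ(ierr);
*/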

1584: /*
1585:    Codes for matrices stored on disk. By default they are
1586:    stored in a universal format. By changing the format with 
1587:    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1588:    be stored in a way natural for the matrix, for example dense matrices
1589:    would be stored as dense. Matrices stored this way may only be
1590:    read into matrices of the same type.
1591: */
1592: #define MATRIX_BINARY_FORMAT_DENSE -1


1597: /*S
1598:      MatNullSpace - Object that removes a null space from a vector, i.e.
1599:          orthogonalizes the vector to a subspace

1601:    Level: advanced

1603:   Concepts: matrix; linear operator, null space

1605:   Users manual sections:
1606: .   Section 4.16 Solving Singular Systems

1608: .seealso:  MatNullSpaceCreate()
1609: S*/
1610: typedef struct _p_MatNullSpace* MatNullSpace;
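
/*
   A sketch of creating a constant null space (e.g. a pure Neumann problem) and attaching it to a
   Krylov solver; assumes petscksp.h is included and a KSP ksp already exists.

     MatNullSpace nullsp;
     ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,PETSC_NULL,&nullsp);CHKERRQ(ierr);
     ierr = KSPSetNullSpace(ksp,nullsp);CHKERRQ(ierr);
     ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
*/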







1648: /*S
1649:     MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free 
1650:               Jacobian vector products

1652:     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

1654:            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure

1656:     Level: developer

1658: .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1659: S*/
1660: typedef struct _p_MatMFFD* MatMFFD;

1662: /*J
1663:     MatMFFDType - algorithm used to compute the differencing parameter h for matrix-free matrix-vector products

1665:    Level: beginner

1667: .seealso: MatMFFDSetType(), MatMFFDRegister()
1668: J*/
1669: #define MatMFFDType char*
1670: #define MATMFFD_DS  "ds"
1671: #define MATMFFD_WP  "wp"
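
/*
   A sketch of a standalone matrix-free Jacobian; the callback signature assumed for
   MatMFFDSetFunction() is (void *ctx, Vec x, Vec f), and MyFunction, ctx, m, n, M, N are
   names assumed to be provided by the calling code.

     Mat J;
     ierr = MatCreateMFFD(PETSC_COMM_WORLD,m,n,M,N,&J);CHKERRQ(ierr);
     ierr = MatMFFDSetFunction(J,MyFunction,ctx);CHKERRQ(ierr);
     ierr = MatMFFDSetType(J,MATMFFD_WP);CHKERRQ(ierr);
     ierr = MatSetFromOptions(J);CHKERRQ(ierr);
*/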


1676: /*MC
1677:    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.

1679:    Synopsis:
1680:    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))

1682:    Not Collective

1684:    Input Parameters:
1685: +  name_solver - name of a new user-defined compute-h module
1686: .  path - path (either absolute or relative) of the library containing this solver
1687: .  name_create - name of routine to create method context
1688: -  routine_create - routine to create method context

1690:    Level: developer

1692:    Notes:
1693:    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.

1695:    If dynamic libraries are used, then the fourth input argument (routine_create)
1696:    is ignored.

1698:    Sample usage:
1699: .vb
1700:    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1701:                "MyHCreate",MyHCreate);
1702: .ve

1704:    Then, your solver can be chosen with the procedural interface via
1705: $     MatMFFDSetType(mfctx,"my_h")
1706:    or at runtime via the option
1707: $     -snes_mf_type my_h

1709: .keywords: MatMFFD, register

1711: .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1712: M*/
1713: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1714: #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1715: #else
1716: #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1717: #endif




1728: /* 
1729:    PETSc interface to MUMPS 
1730: */
1731: #ifdef PETSC_HAVE_MUMPS
1733: #endif

1735: /* 
1736:    PETSc interface to SUPERLU
1737: */
1738: #ifdef PETSC_HAVE_SUPERLU
1740: #endif

1742: #if defined(PETSC_HAVE_CUSP)
1745: #endif

1747: /* 
1748:    PETSc interface to FFTW
1749: */
1750: #if defined(PETSC_HAVE_FFTW)
1754: #endif


1765: #endif