/* dmimpl.h — private DM implementation header (PETSc 3.6.3, 2015-12-03) */

  3: #if !defined(_DMIMPL_H)
  4: #define _DMIMPL_H

  6: #include <petscdm.h>
  7: #include <petsc/private/petscimpl.h>

  9: PETSC_EXTERN PetscBool DMRegisterAllCalled;
 10: PETSC_EXTERN PetscErrorCode DMRegisterAll(void);
 11: typedef PetscErrorCode (*NullSpaceFunc)(DM dm, PetscInt field, MatNullSpace *nullSpace);

 13: typedef struct _DMOps *DMOps;
 14: struct _DMOps {
 15:   PetscErrorCode (*view)(DM,PetscViewer);
 16:   PetscErrorCode (*load)(DM,PetscViewer);
 17:   PetscErrorCode (*clone)(DM,DM*);
 18:   PetscErrorCode (*setfromoptions)(PetscOptions*,DM);
 19:   PetscErrorCode (*setup)(DM);
 20:   PetscErrorCode (*createdefaultsection)(DM);
 21:   PetscErrorCode (*createdefaultconstraints)(DM);
 22:   PetscErrorCode (*createglobalvector)(DM,Vec*);
 23:   PetscErrorCode (*createlocalvector)(DM,Vec*);
 24:   PetscErrorCode (*getlocaltoglobalmapping)(DM);
 25:   PetscErrorCode (*createfieldis)(DM,PetscInt*,char***,IS**);
 26:   PetscErrorCode (*createcoordinatedm)(DM,DM*);

 28:   PetscErrorCode (*getcoloring)(DM,ISColoringType,ISColoring*);
 29:   PetscErrorCode (*creatematrix)(DM, Mat*);
 30:   PetscErrorCode (*createinterpolation)(DM,DM,Mat*,Vec*);
 31:   PetscErrorCode (*getaggregates)(DM,DM,Mat*);
 32:   PetscErrorCode (*getinjection)(DM,DM,Mat*);

 34:   PetscErrorCode (*refine)(DM,MPI_Comm,DM*);
 35:   PetscErrorCode (*coarsen)(DM,MPI_Comm,DM*);
 36:   PetscErrorCode (*refinehierarchy)(DM,PetscInt,DM*);
 37:   PetscErrorCode (*coarsenhierarchy)(DM,PetscInt,DM*);

 39:   PetscErrorCode (*globaltolocalbegin)(DM,Vec,InsertMode,Vec);
 40:   PetscErrorCode (*globaltolocalend)(DM,Vec,InsertMode,Vec);
 41:   PetscErrorCode (*localtoglobalbegin)(DM,Vec,InsertMode,Vec);
 42:   PetscErrorCode (*localtoglobalend)(DM,Vec,InsertMode,Vec);
 43:   PetscErrorCode (*localtolocalbegin)(DM,Vec,InsertMode,Vec);
 44:   PetscErrorCode (*localtolocalend)(DM,Vec,InsertMode,Vec);

 46:   PetscErrorCode (*destroy)(DM);

 48:   PetscErrorCode (*computevariablebounds)(DM,Vec,Vec);

 50:   PetscErrorCode (*createsubdm)(DM,PetscInt,PetscInt*,IS*,DM*);
 51:   PetscErrorCode (*createfielddecomposition)(DM,PetscInt*,char***,IS**,DM**);
 52:   PetscErrorCode (*createdomaindecomposition)(DM,PetscInt*,char***,IS**,IS**,DM**);
 53:   PetscErrorCode (*createddscatters)(DM,PetscInt,DM*,VecScatter**,VecScatter**,VecScatter**);

 55:   PetscErrorCode (*getdimpoints)(DM,PetscInt,PetscInt*,PetscInt*);
 56:   PetscErrorCode (*locatepoints)(DM,Vec,IS*);
 57: };

 59: typedef struct _DMCoarsenHookLink *DMCoarsenHookLink;
 60: struct _DMCoarsenHookLink {
 61:   PetscErrorCode (*coarsenhook)(DM,DM,void*);              /* Run once, when coarse DM is created */
 62:   PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*); /* Run each time a new problem is restricted to a coarse grid */
 63:   void *ctx;
 64:   DMCoarsenHookLink next;
 65: };

 67: typedef struct _DMRefineHookLink *DMRefineHookLink;
 68: struct _DMRefineHookLink {
 69:   PetscErrorCode (*refinehook)(DM,DM,void*);     /* Run once, when a fine DM is created */
 70:   PetscErrorCode (*interphook)(DM,Mat,DM,void*); /* Run each time a new problem is interpolated to a fine grid */
 71:   void *ctx;
 72:   DMRefineHookLink next;
 73: };

 75: typedef struct _DMSubDomainHookLink *DMSubDomainHookLink;
 76: struct _DMSubDomainHookLink {
 77:   PetscErrorCode (*ddhook)(DM,DM,void*);
 78:   PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*);
 79:   void *ctx;
 80:   DMSubDomainHookLink next;
 81: };

 83: typedef struct _DMGlobalToLocalHookLink *DMGlobalToLocalHookLink;
 84: struct _DMGlobalToLocalHookLink {
 85:   PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*);
 86:   PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*);
 87:   void *ctx;
 88:   DMGlobalToLocalHookLink next;
 89: };

 91: typedef struct _DMLocalToGlobalHookLink *DMLocalToGlobalHookLink;
 92: struct _DMLocalToGlobalHookLink {
 93:   PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*);
 94:   PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*);
 95:   void *ctx;
 96:   DMLocalToGlobalHookLink next;
 97: };

 99: typedef enum {DMVEC_STATUS_IN,DMVEC_STATUS_OUT} DMVecStatus;
100: typedef struct _DMNamedVecLink *DMNamedVecLink;
101: struct _DMNamedVecLink {
102:   Vec X;
103:   char *name;
104:   DMVecStatus status;
105:   DMNamedVecLink next;
106: };

108: typedef struct _DMWorkLink *DMWorkLink;
109: struct _DMWorkLink {
110:   size_t     bytes;
111:   void       *mem;
112:   DMWorkLink next;
113: };

115: #define DM_MAX_WORK_VECTORS 100 /* work vectors available to users  via DMGetGlobalVector(), DMGetLocalVector() */

117: struct _p_DM {
118:   PETSCHEADER(struct _DMOps);
119:   Vec                     localin[DM_MAX_WORK_VECTORS],localout[DM_MAX_WORK_VECTORS];
120:   Vec                     globalin[DM_MAX_WORK_VECTORS],globalout[DM_MAX_WORK_VECTORS];
121:   DMNamedVecLink          namedglobal;
122:   DMNamedVecLink          namedlocal;
123:   DMWorkLink              workin,workout;
124:   void                    *ctx;    /* a user context */
125:   PetscErrorCode          (*ctxdestroy)(void**);
126:   Vec                     x;       /* location at which the functions/Jacobian are computed */
127:   ISColoringType          coloringtype;
128:   MatFDColoring           fd;
129:   VecType                 vectype;  /* type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() */
130:   MatType                 mattype;  /* type of matrix created with DMCreateMatrix() */
131:   PetscInt                bs;
132:   ISLocalToGlobalMapping  ltogmap;
133:   PetscBool               prealloc_only; /* Flag indicating the DMCreateMatrix() should only preallocate, not fill the matrix */
134:   PetscInt                levelup,leveldown;  /* if the DM has been obtained by refining (or coarsening) this indicates how many times that process has been used to generate this DM */
135:   PetscBool               setupcalled;        /* Indicates that the DM has been set up, methods that modify a DM such that a fresh setup is required should reset this flag */
136:   void                    *data;
137:   DMCoarsenHookLink       coarsenhook; /* For transfering auxiliary problem data to coarser grids */
138:   DMRefineHookLink        refinehook;
139:   DMSubDomainHookLink     subdomainhook;
140:   DMGlobalToLocalHookLink gtolhook;
141:   DMLocalToGlobalHookLink ltoghook;
142:   /* Topology */
143:   PetscInt                dim;                  /* The topological dimension */
144:   /* Flexible communication */
145:   PetscSF                 sf;                   /* SF for parallel point overlap */
146:   PetscSF                 defaultSF;            /* SF for parallel dof overlap using default section */
147:   /* Allows a non-standard data layout */
148:   PetscSection            defaultSection;       /* Layout for local vectors */
149:   PetscSection            defaultGlobalSection; /* Layout for global vectors */
150:   PetscLayout             map;
151:   /* Constraints */
152:   PetscSection            defaultConstraintSection;
153:   Mat                     defaultConstraintMat;
154:   /* Coordinates */
155:   PetscInt                dimEmbed;             /* The dimension of the embedding space */
156:   DM                      coordinateDM;         /* Layout for coordinates (default section) */
157:   Vec                     coordinates;          /* Coordinate values in global vector */
158:   Vec                     coordinatesLocal;     /* Coordinate values in local  vector */
159:   PetscReal              *L, *maxCell;          /* Size of periodic box and max cell size for determining periodicity */
160:   DMBoundaryType         *bdtype;               /* Indicates type of topological boundary */
161:   /* Null spaces -- of course I should make this have a variable number of fields */
162:   /*   I now believe this might not be the right way: see below */
163:   NullSpaceFunc           nullspaceConstructors[10];
164:   /* Fields are represented by objects */
165:   PetscDS                 prob;
166:   /* Output structures */
167:   DM                      dmBC;                 /* The DM with boundary conditions in the global DM */
168:   PetscInt                outputSequenceNum;    /* The current sequence number for output */
169:   PetscReal               outputSequenceVal;    /* The current sequence value for output */

171:   PetscObject             dmksp,dmsnes,dmts;
172: };

174: PETSC_EXTERN PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

176: PETSC_EXTERN PetscErrorCode DMCreateGlobalVector_Section_Private(DM,Vec*);
177: PETSC_EXTERN PetscErrorCode DMCreateLocalVector_Section_Private(DM,Vec*);
178: PETSC_EXTERN PetscErrorCode DMCreateSubDM_Section_Private(DM,PetscInt,PetscInt[],IS*,DM*);

/*

          Composite Vectors

      Single global representation
      Individual global representations
      Single local representation
      Individual local representations

      Subsets of individual as a single????? Do we handle this by having DMComposite inside composite??????

       DM da_u, da_v, da_p

       DM dm_velocities

       DM dm

       DMDACreate(,&da_u);
       DMDACreate(,&da_v);
       DMCompositeCreate(,&dm_velocities);
       DMCompositeAddDM(dm_velocities,(DM)da_u);
       DMCompositeAddDM(dm_velocities,(DM)da_v);

       DMDACreate(,&da_p);
       DMCompositeCreate(,&dm);
       DMCompositeAddDM(dm,(DM)dm_velocities);
       DMCompositeAddDM(dm,(DM)da_p);


    Access parts of composite vectors (Composite only)
    ---------------------------------
      DMCompositeGetAccess  - access the global vector as subvectors and array (for redundant arrays)
      ADD for local vector -

    Element access
    --------------
      From global vectors
         -DAVecGetArray   - for DMDA
         -VecGetArray - for DMSliced
         ADD for DMComposite???  maybe

      From individual vector
          -DAVecGetArray - for DMDA
          -VecGetArray - for sliced
         ADD for DMComposite??? maybe

      From single local vector
          ADD         * single local vector as arrays?

   Communication
   -------------
      DMGlobalToLocal - global vector to single local vector

      DMCompositeScatter/Gather - direct to individual local vectors and arrays   CHANGE name closer to GlobalToLocal?

   Obtaining vectors
   -----------------
      DMCreateGlobal/Local
      DMGetGlobal/Local
      DMCompositeGetLocalVectors   - gives individual local work vectors and arrays


?????   individual global vectors   ????

*/

246: #if defined(PETSC_HAVE_HDF5)
247: PETSC_EXTERN PetscErrorCode DMSequenceLoad_HDF5(DM, const char *, PetscInt, PetscScalar *, PetscViewer);
248: #endif

250: #endif