Actual source code: dagtol.c

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>
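
/*
  These are the DMDA implementations of the DM interface routines
  DMGlobalToLocalBegin()/DMGlobalToLocalEnd() and
  DMLocalToGlobalBegin()/DMLocalToGlobalEnd(). They all apply the
  global-to-local scatter context dd->gtol that the DMDA constructs
  during setup.
*/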
PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}

PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
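
/*
  Local-to-global transfer applies the same scatter in reverse. With ADD_VALUES
  the full reverse scatter is used, so ghost-point contributions are
  communicated and summed into the owning process. With INSERT_VALUES only the
  locally owned entries are copied (SCATTER_REVERSE_LOCAL), which requires no
  communication: several processes hold copies of each ghost point, so
  "inserting" ghost values into their owner would be ill-defined.
*/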
PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    PetscCall(VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
  } else if (mode == INSERT_VALUES) {
    PetscCall(VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
  PetscFunctionReturn(0);
}

PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    PetscCall(VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
  } else if (mode == INSERT_VALUES) {
    PetscCall(VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
  PetscFunctionReturn(0);
}
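
/*
  A minimal usage sketch through the public interface (assumes an
  already-created DMDA named da; error checking omitted):

     Vec g, l;
     DMCreateGlobalVector(da, &g);
     DMCreateLocalVector(da, &l);
     ... fill g ...
     DMGlobalToLocalBegin(da, g, INSERT_VALUES, l);
     DMGlobalToLocalEnd(da, g, INSERT_VALUES, l);   now l also holds ghost values
     VecDestroy(&l);
     VecDestroy(&g);
*/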
extern PetscErrorCode DMDAGetNatural_Private(DM, PetscInt *, IS *);

/*
   DMDAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on da

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes:
   This is an internal routine called by DMDAGlobalToNaturalBegin() and
   DMDANaturalToGlobalBegin() to create the scatter context.

.seealso: `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscInt m, start, Nlocal;
  IS       from, to;
  Vec      global;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  /* create the scatter context */
  PetscCall(VecGetLocalSize(dd->natural, &m));
  PetscCall(VecGetOwnershipRange(dd->natural, &start, NULL));

  PetscCall(DMDAGetNatural_Private(da, &Nlocal, &to));

  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
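  /* A vector created with a NULL array carries the parallel layout of the DMDA
     global vector but no storage; it serves only as a template for
     VecScatterCreate() and is destroyed immediately afterwards. */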
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecScatterCreate(global, from, dd->natural, to, &dd->gton));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the natural ordering values

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.
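
   A small usage sketch (error checking omitted; the DMDA da is assumed to
   already exist):

.vb
   Vec g, n;
   DMCreateGlobalVector(da, &g);
   DMDACreateNaturalVector(da, &n);
   ... fill g with data in the PETSc ordering ...
   DMDAGlobalToNaturalBegin(da, g, INSERT_VALUES, n);
   DMDAGlobalToNaturalEnd(da, g, INSERT_VALUES, n);
.ve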
.seealso: `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    PetscCall(DMDAGlobalToNatural_Create(da));
  }
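  /* dd->gton is created on first use and cached; the same context is applied
     with SCATTER_FORWARD here and with SCATTER_REVERSE in DMDANaturalToGlobalBegin(). */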
  PetscCall(VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the global values in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.seealso: `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the values in the DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.
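
   A small usage sketch (error checking omitted; the DMDA da and a filled
   natural vector n are assumed to already exist):

.vb
   Vec g;
   DMCreateGlobalVector(da, &g);
   DMDANaturalToGlobalBegin(da, n, INSERT_VALUES, g);
   DMDANaturalToGlobalEnd(da, n, INSERT_VALUES, g);
.ve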
.seealso: `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    PetscCall(DMDAGlobalToNatural_Create(da));
  }
  PetscCall(VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}
/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on da

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the global values in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.seealso: `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE));
  PetscFunctionReturn(0);
}