Actual source code: pcis.c
#include <../src/ksp/pc/impls/is/pcis.h>

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->use_stiffness_scaling = use;
  return 0;
}

/*@
 PCISSetUseStiffnessScaling - Tells PCIS to construct the partition of unity using
                              the diagonal of the local matrices.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  use - whether PCIS uses the matrix diagonal to build the partition of unity

   Level: intermediate

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscTryMethod(pc,"PCISSetUseStiffnessScaling_C",(PC,PetscBool),(pc,use));
  return 0;
}
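/*
   Usage sketch (illustrative, not part of pcis.c): with a PC whose implementation
   is built on PCIS (for example PCNN or PCBDDC), stiffness scaling could be
   requested either from code or through the options database; the ksp object
   below is assumed to exist already.

     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCBDDC);
     PCISSetUseStiffnessScaling(pc,PETSC_TRUE);   (same effect as -pc_is_use_stiffness_scaling)

   Since the call is routed through PetscTryMethod(), it is silently ignored for
   PC types that do not compose "PCISSetUseStiffnessScaling_C".
*/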
static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  PetscObjectReference((PetscObject)scaling_factors);
  VecDestroy(&pcis->D);
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt sn;

    VecGetSize(pcis->D,&sn);
    if (sn == pcis->n) {
      VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
      VecDestroy(&pcis->D);
      VecDuplicate(pcis->vec1_B,&pcis->D);
      VecCopy(pcis->vec1_B,pcis->D);
    }
  }
  return 0;
}

/*@
 PCISSetSubdomainDiagonalScaling - Set diagonal scaling for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scaling_factors - scaling factors for the subdomain

   Level: intermediate

   Notes:
   Intended for use in cases with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscTryMethod(pc,"PCISSetSubdomainDiagonalScaling_C",(PC,Vec),(pc,scaling_factors));
  return 0;
}
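/*
   Usage sketch (illustrative, not part of pcis.c): the scaling vector is built by
   the caller, typically from a per-subdomain coefficient; the names local_vec, rho
   and subdomain_coefficient below are hypothetical. PCIS takes its own reference,
   so the caller may destroy rho afterwards. A vector over all local nodes (size n)
   is scattered onto the interface during setup; a vector already of interface size
   is used as is.

     VecDuplicate(local_vec,&rho);
     VecSet(rho,subdomain_coefficient);
     PCISSetSubdomainDiagonalScaling(pc,rho);
     VecDestroy(&rho);
*/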
static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS*)pc->data;

  pcis->scaling_factor = scal;
  if (pcis->D) {
    VecSet(pcis->D,pcis->scaling_factor);
  }
  return 0;
}

/*@
 PCISSetSubdomainScalingFactor - Set scaling factor for PCIS.

   Not collective

   Input Parameters:
+  pc - the preconditioning context
-  scal - scaling factor for the subdomain

   Level: intermediate

   Notes:
   Intended for use in cases with jumping coefficients.

.seealso: PCBDDC
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscTryMethod(pc,"PCISSetSubdomainScalingFactor_C",(PC,PetscScalar),(pc,scal));
  return 0;
}
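/*
   Usage sketch (illustrative, not part of pcis.c): a constant per-subdomain
   scaling, e.g. a material coefficient rho_s known to the application
   (hypothetical here), can be set with

     PCISSetSubdomainScalingFactor(pc,rho_s);

   If the scaling vector D already exists it is overwritten with this constant;
   otherwise the value is used when D is created in PCISSetUp().
*/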
/* -------------------------------------------------------------------------- */
/*
   PCISSetUp -
*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS     *pcis = (PC_IS*)(pc->data);
  Mat_IS    *matis;
  MatReuse  reuse;
  PetscBool flg,issbaij;

  PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);
  matis = (Mat_IS*)pc->pmat->data;
  if (pc->useAmat) {
    PetscObjectTypeCompare((PetscObject)pc->mat,MATIS,&flg);
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt n_I;
    PetscInt *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscBT  bt;
    PetscInt i,j;

    /* get info on mapping */
    PetscObjectReference((PetscObject)matis->rmapping);
    ISLocalToGlobalMappingDestroy(&pcis->mapping);
    pcis->mapping = matis->rmapping;
    ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n);
    ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));

    /* Identifying interior and interface nodes, in local numbering */
    PetscBTCreate(pcis->n,&bt);
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++) {
        PetscBTSet(bt,pcis->shared[i][j]);
      }

    /* Creating local and global index sets for interior and interface nodes. */
    PetscMalloc1(pcis->n,&idx_I_local);
    PetscMalloc1(pcis->n,&idx_B_local);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!PetscBTLookup(bt,i)) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }

    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global);
    ISLocalToGlobalMappingApply(pcis->mapping,n_I,idx_I_local,idx_I_global);

    /* Creating the index sets */
    ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);
    ISCreateGeneral(PetscObjectComm((PetscObject)pc),pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);
    ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);
    ISCreateGeneral(PetscObjectComm((PetscObject)pc),n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);

    /* Freeing memory */
    PetscFree(idx_B_local);
    PetscFree(idx_I_local);
    PetscBTDestroy(&bt);

    /* Creating work vectors and arrays */
    VecDuplicate(matis->x,&pcis->vec1_N);
    VecDuplicate(pcis->vec1_N,&pcis->vec2_N);
    VecCreate(PETSC_COMM_SELF,&pcis->vec1_D);
    VecSetSizes(pcis->vec1_D,pcis->n-pcis->n_B,PETSC_DECIDE);
    VecSetType(pcis->vec1_D,((PetscObject)pcis->vec1_N)->type_name);
    VecDuplicate(pcis->vec1_D,&pcis->vec2_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec3_D);
    VecDuplicate(pcis->vec1_D,&pcis->vec4_D);
    VecCreate(PETSC_COMM_SELF,&pcis->vec1_B);
    VecSetSizes(pcis->vec1_B,pcis->n_B,PETSC_DECIDE);
    VecSetType(pcis->vec1_B,((PetscObject)pcis->vec1_N)->type_name);
    VecDuplicate(pcis->vec1_B,&pcis->vec2_B);
    VecDuplicate(pcis->vec1_B,&pcis->vec3_B);
    MatCreateVecs(pc->pmat,&pcis->vec1_global,NULL);
    PetscMalloc1(pcis->n,&pcis->work_N);
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      VecDuplicate(pcis->vec1_B,&pcis->D);
      VecSet(pcis->D,pcis->scaling_factor);
    }

    /* Creating the scatter contexts */
    VecScatterCreate(pcis->vec1_N,pcis->is_I_local,pcis->vec1_D,(IS)0,&pcis->N_to_D);
    VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);
    VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);
    VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);

    /* map from boundary to local */
    ISLocalToGlobalMappingCreateIS(pcis->is_B_local,&pcis->BtoNmap);
  }

  {
    PetscInt sn;

    VecGetSize(pcis->D,&sn);
    if (sn == pcis->n) {
      VecScatterBegin(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(pcis->N_to_B,pcis->D,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
      VecDestroy(&pcis->D);
      VecDuplicate(pcis->vec1_B,&pcis->D);
      VecCopy(pcis->vec1_B,pcis->D);
    }
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
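  /*
     These blocks define the interface Schur complement

         S = A_BB - A_BI * inv(A_II) * A_IB,

     which is applied by PCISApplySchur() and whose inverse is applied, through a
     local Neumann solve, by PCISApplyInvSchur() below.
  */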
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs,ibs;

    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      MatDestroy(&pcis->A_II);
      MatDestroy(&pcis->pA_II);
      MatDestroy(&pcis->A_IB);
      MatDestroy(&pcis->A_BI);
      MatDestroy(&pcis->A_BB);
    }

    ISLocalToGlobalMappingGetBlockSize(pcis->mapping,&ibs);
    MatGetBlockSize(matis->A,&bs);
    MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->pA_II);
    if (amat) {
      Mat_IS *amatis = (Mat_IS*)pc->mat->data;
      MatCreateSubMatrix(amatis->A,pcis->is_I_local,pcis->is_I_local,reuse,&pcis->A_II);
    } else {
      PetscObjectReference((PetscObject)pcis->pA_II);
      MatDestroy(&pcis->A_II);
      pcis->A_II = pcis->pA_II;
    }
    MatSetBlockSize(pcis->A_II,bs == ibs ? bs : 1);
    MatSetBlockSize(pcis->pA_II,bs == ibs ? bs : 1);
    MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,reuse,&pcis->A_BB);
    PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
    if (!issbaij) {
      MatCreateSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
      MatCreateSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
    } else {
      Mat newmat;

      MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);
      MatCreateSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,reuse,&pcis->A_IB);
      MatCreateSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,reuse,&pcis->A_BI);
      MatDestroy(&newmat);
    }
    MatSetBlockSize(pcis->A_BB,bs == ibs ? bs : 1);
  }

  /* Creating scaling vector D */
  PetscOptionsGetBool(((PetscObject)pc)->options,((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt    i,n;

    if (pcis->A_BB) {
      MatGetDiagonal(pcis->A_BB,pcis->D);
    } else {
      MatGetDiagonal(matis->A,pcis->vec1_N);
      VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
    }
    VecAbs(pcis->D);
    VecGetLocalSize(pcis->D,&n);
    VecGetArray(pcis->D,&a);
    for (i=0;i<n;i++) if (PetscAbsScalar(a[i])<PETSC_SMALL) a[i] = 1.0;
    VecRestoreArray(pcis->D,&a);
  }
  VecSet(pcis->vec1_global,0.0);
  VecScatterBegin(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(pcis->global_to_B,pcis->D,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
  VecScatterBegin(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(pcis->global_to_B,pcis->vec1_global,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
  VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);
  /* See historical note 01, at the bottom of this file. */
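  /*
     The reverse/forward scatter pair above accumulates, at every interface node,
     the sum of D over all subdomains sharing that node; the pointwise division
     then turns D into a partition of unity on the interface (each local weight
     divided by the sum of the weights of the sharing subdomains), which is what
     the scaling routines above ultimately control.
  */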
  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);
    KSPSetErrorIfNotConverged(pcis->ksp_D,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);
    KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");
    KSPGetPC(pcis->ksp_D,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_D,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_D);
    KSPSetUp(pcis->ksp_D);
    /* Neumann */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);
    KSPSetErrorIfNotConverged(pcis->ksp_N,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_N,matis->A,matis->A);
    KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");
    KSPGetPC(pcis->ksp_N,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_N,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_N);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);
      if (!damp_fixed) fixed_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);

      PetscOptionsGetReal(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
                          &floating_factor,&set_damping_factor_floating);
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->options,((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);

      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);
          MatSetNullSpace(matis->A,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      }
    }
    KSPSetUp(pcis->ksp_N);
  }
  return 0;
}
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy -
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  ISDestroy(&pcis->is_B_local);
  ISDestroy(&pcis->is_I_local);
  ISDestroy(&pcis->is_B_global);
  ISDestroy(&pcis->is_I_global);
  MatDestroy(&pcis->A_II);
  MatDestroy(&pcis->pA_II);
  MatDestroy(&pcis->A_IB);
  MatDestroy(&pcis->A_BI);
  MatDestroy(&pcis->A_BB);
  VecDestroy(&pcis->D);
  KSPDestroy(&pcis->ksp_N);
  KSPDestroy(&pcis->ksp_D);
  VecDestroy(&pcis->vec1_N);
  VecDestroy(&pcis->vec2_N);
  VecDestroy(&pcis->vec1_D);
  VecDestroy(&pcis->vec2_D);
  VecDestroy(&pcis->vec3_D);
  VecDestroy(&pcis->vec4_D);
  VecDestroy(&pcis->vec1_B);
  VecDestroy(&pcis->vec2_B);
  VecDestroy(&pcis->vec3_B);
  VecDestroy(&pcis->vec1_global);
  VecScatterDestroy(&pcis->global_to_D);
  VecScatterDestroy(&pcis->N_to_B);
  VecScatterDestroy(&pcis->N_to_D);
  VecScatterDestroy(&pcis->global_to_B);
  PetscFree(pcis->work_N);
  if (pcis->n_neigh > -1) {
    ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  }
  ISLocalToGlobalMappingDestroy(&pcis->mapping);
  ISLocalToGlobalMappingDestroy(&pcis->BtoNmap);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);
  return 0;
}
/* -------------------------------------------------------------------------- */
/*
   PCISCreate -
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  pcis->n_neigh          = -1;
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* composing functions */
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",PCISSetUseStiffnessScaling_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",PCISSetSubdomainScalingFactor_IS);
  PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",PCISSetSubdomainDiagonalScaling_IS);
  return 0;
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur -

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of applying the Schur complement to v
.  vec2_B - garbage (used as work space), or null (and v is used as workspace)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
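/*
   In terms of the blocks extracted in PCISSetUp(), this routine computes

       vec1_B = S v = A_BB v - A_BI inv(A_II) (A_IB v),

   with the action of inv(A_II) provided by the local Dirichlet solver ksp_D.
*/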
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  if (!vec2_B) vec2_B = v;

  MatMult(pcis->A_BB,v,vec1_B);
  MatMult(pcis->A_IB,v,vec1_D);
  KSPSolve(pcis->ksp_D,vec1_D,vec2_D);
  KSPCheckSolve(pcis->ksp_D,pc,vec2_D);
  MatMult(pcis->A_BI,vec2_D,vec2_B);
  VecAXPY(vec1_B,-1.0,vec2_B);
  return 0;
}
/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes,
   interior or interface, including ghosts) into an interface vector when in SCATTER_FORWARD mode,
   or vice-versa when in SCATTER_REVERSE mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  VecGetArray(v_B,&array_B);
  ISGetIndices(pcis->is_B_local,&idex);

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_B[i] = array_N[idex[i]];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_B[i] += array_N[idex[i]];
    }
  } else { /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] = array_B[i];
    } else { /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) array_N[idex[i]] += array_B[i];
    }
  }
  ISRestoreIndices(pcis->is_B_local,&idex);
  VecRestoreArray(v_B,&array_B);
  return 0;
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:
                                        [ A_II  A_IB ] [ . ]   [ 0 ]
                                        [            ] [   ] = [   ]
                                        [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
       complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
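/*
   Block elimination of the interior unknowns in the system above shows that the
   interface part x of the solution satisfies

       (A_BB - A_BI inv(A_II) A_IB) x = S x = b,

   so the x returned here is indeed the action of inv(S) on b.
*/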
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  /*
    Neumann solvers.
    Applying the inverse of the local Schur complement, i.e., solving a Neumann
    problem with zero at the interior nodes of the RHS and extracting the interface
    part of the solution. The inverse of the Schur complement is applied to b and
    the result is stored in x.
  */
  /* Setting the RHS vec1_N */
  VecSet(vec1_N,0.0);
  VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscOptionsGetBool(NULL,NULL,"-pc_is_check_consistency",&flg,NULL);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);

      VecSum(vec1_N,&average);
      average = average / ((PetscReal)pcis->n);
      PetscViewerASCIIPushSynchronized(viewer);
      if (pcis->pure_neumann) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      } else {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      }
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    }
  }
  /* Solving the system for vec2_N */
  KSPSolve(pcis->ksp_N,vec1_N,vec2_N);
  KSPCheckSolve(pcis->ksp_N,pc,vec2_N);
  /* Extracting the local interface vector out of the solution */
  VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  return 0;
}