Actual source code: pod.c
#include <petsc/private/kspimpl.h>
#include <petsc/private/matimpl.h>
#include <petscblaslapack.h>
static PetscBool cited = PETSC_FALSE;
static const char citation[] =
"@phdthesis{zampini2010non,\n"
" title={Non-overlapping Domain Decomposition Methods for Cardiac Reaction-Diffusion Models and Applications},\n"
" author={Zampini, S},\n"
" year={2010},\n"
" school={PhD thesis, Universita degli Studi di Milano}\n"
"}\n";

typedef struct {
  PetscInt     maxn;             /* maximum number of snapshots */
  PetscInt     n;                /* number of active snapshots */
  PetscInt     curr;             /* current tip of snapshots set */
  Vec          *xsnap;           /* snapshots */
  Vec          *bsnap;           /* rhs snapshots */
  Vec          *work;            /* parallel work vectors */
  PetscScalar  *dots_iallreduce;
  MPI_Request  req_iallreduce;
  PetscInt     ndots_iallreduce; /* if we have iallreduce we can hide the VecMDot communications */
  PetscReal    tol;              /* relative tolerance to retain eigenvalues */
  PetscBool    Aspd;             /* if true, uses the SPD operator as inner product */
  PetscScalar  *corr;            /* correlation matrix */
  PetscReal    *eigs;            /* eigenvalues */
  PetscScalar  *eigv;            /* eigenvectors */
  PetscBLASInt nen;              /* dimension of lower dimensional system */
  PetscInt     st;               /* first eigenvector of correlation matrix to be retained */
  PetscBLASInt *iwork;           /* integer work vector */
  PetscScalar  *yhay;            /* Y^H * A * Y */
  PetscScalar  *low;             /* lower dimensional linear system */
#if defined(PETSC_USE_COMPLEX)
  PetscReal    *rwork;
#endif
  PetscBLASInt lwork;
  PetscScalar  *swork;
  PetscBool    monitor;
} KSPGuessPOD;

static PetscErrorCode KSPGuessReset_POD(KSPGuess guess)
{
  KSPGuessPOD *pod  = (KSPGuessPOD*)guess->data;
  PetscLayout Alay = NULL,vlay = NULL;
  PetscBool   cong;

  pod->nen  = 0;
  pod->n    = 0;
  pod->curr = 0;
  /* need to wait for completion of outstanding requests */
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
  }
  pod->ndots_iallreduce = 0;
  /* destroy the snapshot vectors if the layout of the linear system has changed */
  if (guess->A) {
    MatGetLayouts(guess->A,&Alay,NULL);
  }
  if (pod->xsnap) {
    VecGetLayout(pod->xsnap[0],&vlay);
  }
  cong = PETSC_FALSE;
  if (vlay && Alay) {
    PetscLayoutCompare(Alay,vlay,&cong);
  }
  if (!cong) {
    VecDestroyVecs(pod->maxn,&pod->xsnap);
    VecDestroyVecs(pod->maxn,&pod->bsnap);
    VecDestroyVecs(1,&pod->work);
  }
  return 0;
}

static PetscErrorCode KSPGuessSetUp_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;

  if (!pod->corr) {
    PetscScalar  sdummy;
    PetscReal    rdummy = 0;
    PetscBLASInt bN,lierr,idummy;

    PetscCalloc6(pod->maxn*pod->maxn,&pod->corr,pod->maxn,&pod->eigs,pod->maxn*pod->maxn,&pod->eigv,
                 6*pod->maxn,&pod->iwork,pod->maxn*pod->maxn,&pod->yhay,pod->maxn*pod->maxn,&pod->low);
#if defined(PETSC_USE_COMPLEX)
    PetscMalloc1(7*pod->maxn,&pod->rwork);
#endif
#if defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
    PetscMalloc1(3*pod->maxn,&pod->dots_iallreduce);
#endif
    /* workspace query for LAPACK syevx (lwork = -1), then allocate the scalar workspace */
    pod->lwork = -1;
    PetscBLASIntCast(pod->maxn,&bN);
#if !defined(PETSC_USE_COMPLEX)
    PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->corr,&bN,&rdummy,&rdummy,&idummy,&idummy,
                                                  &rdummy,&idummy,pod->eigs,pod->eigv,&bN,&sdummy,&pod->lwork,pod->iwork,pod->iwork+5*bN,&lierr));
#else
    PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->corr,&bN,&rdummy,&rdummy,&idummy,&idummy,
                                                  &rdummy,&idummy,pod->eigs,pod->eigv,&bN,&sdummy,&pod->lwork,pod->rwork,pod->iwork,pod->iwork+5*bN,&lierr));
#endif
    PetscBLASIntCast((PetscInt)PetscRealPart(sdummy),&pod->lwork);
    PetscMalloc1(pod->lwork+PetscMax(bN*bN,6*bN),&pod->swork);
  }
  /* snapshot vectors are sequential; the local dot products are combined with an explicit MPI_Allreduce */
  if (!pod->xsnap) {
    VecType  type;
    Vec      *v,vseq;
    PetscInt n;

    KSPCreateVecs(guess->ksp,1,&v,0,NULL);
    VecCreate(PETSC_COMM_SELF,&vseq);
    VecGetLocalSize(v[0],&n);
    VecSetSizes(vseq,n,n);
    VecGetType(v[0],&type);
    VecSetType(vseq,type);
    VecDestroyVecs(1,&v);
    VecDuplicateVecs(vseq,pod->maxn,&pod->xsnap);
    VecDestroy(&vseq);
    PetscLogObjectParents(guess,pod->maxn,pod->xsnap);
  }
  if (!pod->bsnap) {
    VecDuplicateVecs(pod->xsnap[0],pod->maxn,&pod->bsnap);
    PetscLogObjectParents(guess,pod->maxn,pod->bsnap);
  }
  if (!pod->work) {
    KSPCreateVecs(guess->ksp,1,&pod->work,0,NULL);
  }
  return 0;
}

static PetscErrorCode KSPGuessDestroy_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;

  PetscFree6(pod->corr,pod->eigs,pod->eigv,pod->iwork,
             pod->yhay,pod->low);
#if defined(PETSC_USE_COMPLEX)
  PetscFree(pod->rwork);
#endif
  /* need to wait for completion before destroying dots_iallreduce */
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
  }
  PetscFree(pod->dots_iallreduce);
  PetscFree(pod->swork);
  VecDestroyVecs(pod->maxn,&pod->bsnap);
  VecDestroyVecs(pod->maxn,&pod->xsnap);
  VecDestroyVecs(1,&pod->work);
  PetscFree(pod);
  return 0;
}

static PetscErrorCode KSPGuessUpdate_POD(KSPGuess,Vec,Vec);

static PetscErrorCode KSPGuessFormGuess_POD(KSPGuess guess,Vec b,Vec x)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;
  PetscScalar  one = 1, zero = 0, *array;
  PetscBLASInt bN,ione = 1,bNen,lierr;
  PetscInt     i;

  PetscCitationsRegister(citation,&cited);
  if (pod->ndots_iallreduce) { /* complete communication and project the linear system */
    KSPGuessUpdate_POD(guess,NULL,NULL);
  }
  if (!pod->nen) return 0;
  /* b_low = S * V^T * X^T * b */
  VecGetArrayRead(b,(const PetscScalar**)&array);
  VecPlaceArray(pod->bsnap[pod->curr],array);
  VecRestoreArrayRead(b,(const PetscScalar**)&array);
  VecMDot(pod->bsnap[pod->curr],pod->n,pod->xsnap,pod->swork);
  VecResetArray(pod->bsnap[pod->curr]);
  MPIU_Allreduce(pod->swork,pod->swork + pod->n,pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess));
  PetscBLASIntCast(pod->n,&bN);
  PetscBLASIntCast(pod->nen,&bNen);
  PetscStackCallBLAS("BLASgemv",BLASgemv_("T",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork+pod->n,&ione,&zero,pod->swork,&ione));
  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD alphas = ");
    for (i=0; i<pod->nen; i++) {
#if defined(PETSC_USE_COMPLEX)
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g + %g i",(double)PetscRealPart(pod->swork[i]),(double)PetscImaginaryPart(pod->swork[i]));
#else
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g ",(double)pod->swork[i]);
#endif
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
  }
  /* A_low x_low = b_low */
  if (!pod->Aspd) { /* if A is SPD, the projected operator is the identity and no solve is needed */
    KSP       pksp = guess->ksp;
    PetscBool tsolve,symm;

    if (pod->monitor) {
      PetscMPIInt rank;
      Mat         L;

      MPI_Comm_rank(PetscObjectComm((PetscObject)guess),&rank);
      MatCreateSeqDense(PETSC_COMM_SELF,pod->nen,pod->nen,pod->low,&L);
      if (rank == 0) {
        MatView(L,NULL);
      }
      MatDestroy(&L);
    }
    MatGetOption(guess->A,MAT_SYMMETRIC,&symm);
    tsolve = symm ? PETSC_FALSE : pksp->transpose_solve;
    PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&bNen,&bNen,pod->low,&bNen,pod->iwork,&lierr));
    PetscStackCallBLAS("LAPACKgetrs",LAPACKgetrs_(tsolve ? "T" : "N",&bNen,&ione,pod->low,&bNen,pod->iwork,pod->swork,&bNen,&lierr));
  }
  /* x = X * V * S * x_low */
  PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork,&ione,&zero,pod->swork+pod->n,&ione));
  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD sol = ");
    for (i=0; i<pod->nen; i++) {
#if defined(PETSC_USE_COMPLEX)
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g + %g i",(double)PetscRealPart(pod->swork[i+pod->n]),(double)PetscImaginaryPart(pod->swork[i+pod->n]));
#else
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%g ",(double)pod->swork[i+pod->n]);
#endif
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
  }
  VecGetArray(x,&array);
  VecPlaceArray(pod->bsnap[pod->curr],array);
  VecSet(pod->bsnap[pod->curr],0);
  VecMAXPY(pod->bsnap[pod->curr],pod->n,pod->swork+pod->n,pod->xsnap);
  VecResetArray(pod->bsnap[pod->curr]);
  VecRestoreArray(x,&array);
  return 0;
}

static PetscErrorCode KSPGuessUpdate_POD(KSPGuess guess, Vec b, Vec x)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;
  PetscScalar  one = 1, zero = 0,*array;
  PetscReal    toten, parten, reps = 0; /* dlamch? */
  PetscBLASInt bN,lierr,idummy;
  PetscInt     i;

  if (pod->ndots_iallreduce) goto complete_request;
  pod->n = pod->n < pod->maxn ? pod->n+1 : pod->maxn;
  VecCopy(x,pod->xsnap[pod->curr]);
  VecGetArray(pod->bsnap[pod->curr],&array);
  VecPlaceArray(pod->work[0],array);
  KSP_MatMult(guess->ksp,guess->A,x,pod->work[0]);
  VecResetArray(pod->work[0]);
  VecRestoreArray(pod->bsnap[pod->curr],&array);
  if (pod->Aspd) {
    VecMDot(pod->xsnap[pod->curr],pod->n,pod->bsnap,pod->swork);
#if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
    MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess));
#else
    MPI_Iallreduce(pod->swork,pod->dots_iallreduce,pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
    pod->ndots_iallreduce = 1;
#endif
  } else {
    PetscInt  off;
    PetscBool herm;

#if defined(PETSC_USE_COMPLEX)
    MatGetOption(guess->A,MAT_HERMITIAN,&herm);
#else
    MatGetOption(guess->A,MAT_SYMMETRIC,&herm);
#endif
    off = (guess->ksp->transpose_solve && !herm) ? 2*pod->n : pod->n;

    /* TODO: we may want to use a user-defined dot for the correlation matrix */
    VecMDot(pod->xsnap[pod->curr],pod->n,pod->xsnap,pod->swork);
    VecMDot(pod->bsnap[pod->curr],pod->n,pod->xsnap,pod->swork + off);
    if (!herm) {
      off = (off == pod->n) ? 2*pod->n : pod->n;
      VecMDot(pod->xsnap[pod->curr],pod->n,pod->bsnap,pod->swork + off);
#if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
      MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,3*pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess));
#else
      MPI_Iallreduce(pod->swork,pod->dots_iallreduce,3*pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
      pod->ndots_iallreduce = 3;
#endif
    } else {
#if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
      MPIU_Allreduce(pod->swork,pod->swork + 3*pod->n,2*pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess));
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->swork[4*pod->n + i];
#else
      MPI_Iallreduce(pod->swork,pod->dots_iallreduce,2*pod->n,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess),&pod->req_iallreduce);
      pod->ndots_iallreduce = 2;
#endif
    }
  }
  if (pod->ndots_iallreduce) return 0;

complete_request:
  if (pod->ndots_iallreduce) {
    MPI_Wait(&pod->req_iallreduce,MPI_STATUS_IGNORE);
    switch (pod->ndots_iallreduce) {
    case 3:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[         i];
      for (i=0;i<pod->n;i++) pod->swork[4*pod->n + i] = pod->dots_iallreduce[  pod->n+i];
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->dots_iallreduce[2*pod->n+i];
      break;
    case 2:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[       i];
      for (i=0;i<pod->n;i++) pod->swork[4*pod->n + i] = pod->dots_iallreduce[pod->n+i];
      for (i=0;i<pod->n;i++) pod->swork[5*pod->n + i] = pod->dots_iallreduce[pod->n+i];
      break;
    case 1:
      for (i=0;i<pod->n;i++) pod->swork[3*pod->n + i] = pod->dots_iallreduce[i];
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)guess),PETSC_ERR_PLIB,"Invalid number of outstanding dots operations: %D",pod->ndots_iallreduce);
    }
  }
  pod->ndots_iallreduce = 0;

  /* correlation matrix and Y^H A Y (Galerkin) */
  for (i=0;i<pod->n;i++) {
    pod->corr[pod->curr*pod->maxn+i] = pod->swork[3*pod->n + i];
    pod->corr[i*pod->maxn+pod->curr] = PetscConj(pod->swork[3*pod->n + i]);
    if (!pod->Aspd) {
      pod->yhay[pod->curr*pod->maxn+i] = pod->swork[4*pod->n + i];
      pod->yhay[i*pod->maxn+pod->curr] = PetscConj(pod->swork[5*pod->n + i]);
    }
  }
  /* LAPACK syevx overwrites its input, so work on a copy of the correlation matrix */
  for (i=0;i<pod->n;i++) {
    PetscInt j;
    for (j=i;j<pod->n;j++) pod->swork[i*pod->n+j] = pod->corr[i*pod->maxn+j];
  }
  PetscBLASIntCast(pod->n,&bN);
#if !defined(PETSC_USE_COMPLEX)
  PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->swork,&bN,
                                                &reps,&reps,&idummy,&idummy,
                                                &reps,&idummy,pod->eigs,pod->eigv,&bN,
                                                pod->swork+bN*bN,&pod->lwork,pod->iwork,pod->iwork+5*bN,&lierr));
#else
  PetscStackCallBLAS("LAPACKsyevx",LAPACKsyevx_("V","A","L",&bN,pod->swork,&bN,
                                                &reps,&reps,&idummy,&idummy,
                                                &reps,&idummy,pod->eigs,pod->eigv,&bN,
                                                pod->swork+bN*bN,&pod->lwork,pod->rwork,pod->iwork,pod->iwork+5*bN,&lierr));
#endif

  /* dimension of lower dimensional system */
  pod->st = -1;
  for (i=0,toten=0;i<pod->n;i++) {
    pod->eigs[i] = PetscMax(pod->eigs[i],0.0);
    toten += pod->eigs[i];
    if (!pod->eigs[i]) pod->st = i;
  }
  pod->nen = 0;
  for (i=pod->n-1,parten=0;i>pod->st && toten > 0;i--) {
    pod->nen++;
    parten += pod->eigs[i];
    if (parten + toten*pod->tol >= toten) break;
  }
  pod->st = pod->n - pod->nen;

  /* Compute eigv = V * S */
  for (i=pod->st;i<pod->n;i++) {
    const PetscReal v  = 1.0/PetscSqrtReal(pod->eigs[i]);
    const PetscInt  st = pod->n*i;
    PetscInt        j;

    for (j=0;j<pod->n;j++) pod->eigv[st+j] *= v;
  }

  /* compute S * V^T * X^T * A * X * V * S if needed */
  if (pod->nen && !pod->Aspd) {
    PetscBLASInt bNen,bMaxN;
    PetscInt     st = pod->st*pod->n;
    PetscBLASIntCast(pod->nen,&bNen);
    PetscBLASIntCast(pod->maxn,&bMaxN);
    PetscStackCallBLAS("BLASgemm",BLASgemm_("T","N",&bNen,&bN,&bN,&one,pod->eigv+st,&bN,pod->yhay,&bMaxN,&zero,pod->swork,&bNen));
    PetscStackCallBLAS("BLASgemm",BLASgemm_("N","N",&bNen,&bNen,&bN,&one,pod->swork,&bNen,pod->eigv+st,&bN,&zero,pod->low,&bNen));
  }

  if (pod->monitor) {
    PetscPrintf(PetscObjectComm((PetscObject)guess)," KSPGuessPOD: basis %D, energy fractions = ",pod->nen);
    for (i=pod->n-1;i>=0;i--) {
      PetscPrintf(PetscObjectComm((PetscObject)guess),"%1.6e (%d) ",(double)(pod->eigs[i]/toten),i >= pod->st ? 1 : 0);
    }
    PetscPrintf(PetscObjectComm((PetscObject)guess),"\n");
    if (PetscDefined(USE_DEBUG)) {
      for (i=0;i<pod->n;i++) {
        Vec          v;
        PetscInt     j;
        PetscBLASInt bNen,ione = 1;

        VecDuplicate(pod->xsnap[i],&v);
        VecCopy(pod->xsnap[i],v);
        PetscBLASIntCast(pod->nen,&bNen);
        PetscStackCallBLAS("BLASgemv",BLASgemv_("T",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->corr+pod->maxn*i,&ione,&zero,pod->swork,&ione));
        PetscStackCallBLAS("BLASgemv",BLASgemv_("N",&bN,&bNen,&one,pod->eigv+pod->st*pod->n,&bN,pod->swork,&ione,&zero,pod->swork+pod->n,&ione));
        for (j=0;j<pod->n;j++) pod->swork[j] = -pod->swork[pod->n+j];
        VecMAXPY(v,pod->n,pod->swork,pod->xsnap);
        VecDot(v,v,pod->swork);
        MPIU_Allreduce(pod->swork,pod->swork + 1,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)guess));
        PetscPrintf(PetscObjectComm((PetscObject)guess)," Error projection %D: %g (expected lower than %g)\n",i,(double)PetscRealPart(pod->swork[1]),(double)(toten-parten));
        VecDestroy(&v);
      }
    }
  }
  /* new tip */
  pod->curr = (pod->curr+1)%pod->maxn;
  return 0;
}

static PetscErrorCode KSPGuessSetFromOptions_POD(KSPGuess guess)
{
  KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;

  PetscOptionsBegin(PetscObjectComm((PetscObject)guess),((PetscObject)guess)->prefix,"POD initial guess options","KSPGuess");
  PetscOptionsInt("-ksp_guess_pod_size","Number of snapshots",NULL,pod->maxn,&pod->maxn,NULL);
  PetscOptionsBool("-ksp_guess_pod_monitor","Monitor initial guess generator",NULL,pod->monitor,&pod->monitor,NULL);
  PetscOptionsReal("-ksp_guess_pod_tol","Tolerance to retain eigenvectors","KSPGuessSetTolerance",pod->tol,&pod->tol,NULL);
  PetscOptionsBool("-ksp_guess_pod_Ainner","Use the operator as inner product (must be SPD)",NULL,pod->Aspd,&pod->Aspd,NULL);
  PetscOptionsEnd();
  return 0;
}

static PetscErrorCode KSPGuessSetTolerance_POD(KSPGuess guess,PetscReal tol)
{
  KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;

  pod->tol = tol;
  return 0;
}

static PetscErrorCode KSPGuessView_POD(KSPGuess guess,PetscViewer viewer)
{
  KSPGuessPOD *pod = (KSPGuessPOD*)guess->data;
  PetscBool   isascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"Max size %D, tolerance %g, Ainner %d\n",pod->maxn,(double)pod->tol,pod->Aspd);
  }
  return 0;
}

/*
    KSPGUESSPOD - Implements a proper orthogonal decomposition (POD) based Galerkin scheme for repeated linear system solves.

  The initial guess is obtained by solving a small, dense linear system obtained by Galerkin projection onto a lower dimensional space generated by the previous solutions.
  The maximum number of retained solutions and the energy tolerance used to build the lower dimensional basis can be specified at the command line with -ksp_guess_pod_size <int> and -ksp_guess_pod_tol <real>.

   References:
.  * - http://www.math.uni-konstanz.de/numerik/personen/volkwein/teaching/POD-Book.pdf

   Level: intermediate

.seealso: KSPGuess, KSPGuessType, KSPGuessCreate(), KSPSetGuess(), KSPGetGuess()
*/
PetscErrorCode KSPGuessCreate_POD(KSPGuess guess)
{
  KSPGuessPOD *pod;

  PetscNewLog(guess,&pod);
  pod->maxn = 10;
  pod->tol  = PETSC_MACHINE_EPSILON;
  guess->data = pod;

  guess->ops->setfromoptions = KSPGuessSetFromOptions_POD;
  guess->ops->destroy        = KSPGuessDestroy_POD;
  guess->ops->settolerance   = KSPGuessSetTolerance_POD;
  guess->ops->setup          = KSPGuessSetUp_POD;
  guess->ops->view           = KSPGuessView_POD;
  guess->ops->reset          = KSPGuessReset_POD;
  guess->ops->update         = KSPGuessUpdate_POD;
  guess->ops->formguess      = KSPGuessFormGuess_POD;
  return 0;
}
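
/*
  Usage sketch (editorial addition, not part of pod.c): the man page above explains that KSPGUESSPOD is
  normally enabled from the options database (-ksp_guess_type pod); the fragment below shows the
  equivalent programmatic setup for a sequence of solves. It assumes a configured KSP `ksp`, vectors
  `b` and `x`, and a step count `nsteps` already exist; the tolerance 1e-6 is an arbitrary illustration,
  not a recommended value.

    KSPGuess guess;

    KSPGetGuess(ksp,&guess);              // the guess object is owned by the KSP
    KSPGuessSetType(guess,KSPGUESSPOD);   // same as -ksp_guess_type pod
    KSPGuessSetTolerance(guess,1.e-6);    // same as -ksp_guess_pod_tol 1e-6
    KSPSetFromOptions(ksp);
    for (PetscInt step = 0; step < nsteps; step++) {
      // assemble or update b (and possibly the operator) for this step, then solve;
      // each KSPSolve records a snapshot and improves the guess for the next solve
      KSPSolve(ksp,b,x);
    }
*/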