Actual source code: test43.c (slepc-3.18.0, 2022-10-01)
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/
static char help[] = "Solves a linear system using PCHPDDM.\n"
  "Modification of ${PETSC_DIR}/src/ksp/ksp/tutorials/ex76.c where concurrent EPS are instantiated explicitly by the user.\n\n";

#include <slepceps.h>
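
/* Overview: the assembled operator A, a local auxiliary matrix aux, and an index set is
   describing each process' overlapping subdomain are read from -load_dir; every process
   then solves its own sequential generalized eigenproblem with an explicitly created EPS
   and hands the resulting eigenvectors to PCHPDDM through PCHPDDMSetDeflationMat() */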
int main(int argc,char **args)
{
  Mat            A,aux,a,P,B,X;
  Vec            b;
  KSP            ksp;
  PC             pc;
  EPS            eps;
  ST             st;
  IS             is,sizes;
  const PetscInt *idx;
  PetscInt       m,rstart,rend,location,nev,nconv;
  PetscMPIInt    rank,size;
  PetscViewer    viewer;
  char           dir[PETSC_MAX_PATH_LEN],name[PETSC_MAX_PATH_LEN];
  PetscBool      flg;
  PetscCall(SlepcInitialize(&argc,&args,NULL,help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatCreate(PETSC_COMM_SELF,&aux));
  PetscCall(ISCreate(PETSC_COMM_SELF,&is));
  PetscCall(PetscStrcpy(dir,"."));
  PetscCall(PetscOptionsGetString(NULL,NULL,"-load_dir",dir,sizeof(dir),NULL));
  /* loading matrices */
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/sizes_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISCreate(PETSC_COMM_SELF,&sizes));
  PetscCall(ISLoad(sizes,viewer));
  PetscCall(ISGetIndices(sizes,&idx));
  PetscCall(MatSetSizes(A,idx[0],idx[1],idx[2],idx[3]));
  PetscCall(ISRestoreIndices(sizes,&idx));
  PetscCall(ISDestroy(&sizes));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatSetUp(A));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/A.dat",dir));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(A,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/is_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(ISLoad(is,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(ISSetBlockSize(is,2));
  PetscCall(PetscSNPrintf(name,sizeof(name),"%s/Neumann_%d_%d.dat",dir,rank,size));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer));
  PetscCall(MatLoad(aux,viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(MatSetBlockSizesFromMats(aux,A,A));
  PetscCall(MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE));
  PetscCall(MatSetOption(aux,MAT_SYMMETRIC,PETSC_TRUE));
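  /* is holds the global indices of the overlapping subdomain owned by this process, and aux
     the matching unassembled (Neumann) subdomain matrix, following the setup of ex76.c */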
  /* ready for testing */
  PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
  PetscCall(KSPSetOperators(ksp,A,A));
  PetscCall(KSPGetPC(ksp,&pc));
  PetscCall(PCSetType(pc,PCHPDDM));
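  /* instead of letting PCHPDDM assemble and solve the subdomain eigenproblems internally
     during PCSetUp(), the deflation space is built explicitly below, one EPS per process */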
  PetscCall(MatDuplicate(aux,MAT_DO_NOT_COPY_VALUES,&B)); /* duplicate so that MatStructure is SAME_NONZERO_PATTERN */
  PetscCall(MatGetDiagonalBlock(A,&a));
  PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
  PetscCall(ISGetLocalSize(is,&m));
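  /* P is a boolean matrix mapping each locally owned row of A to its position in is, so that
     X = P^T a P below embeds the diagonal block of A into the overlapping subdomain numbering */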
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,rend-rstart,m,1,NULL,&P));
  for (m = rstart; m < rend; ++m) {
    PetscCall(ISLocate(is,m,&location));
    PetscCheck(location >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"IS of the auxiliary Mat does not include all local rows of A");
    PetscCall(MatSetValue(P,m-rstart,location,1.0,INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(P,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(P,MAT_FINAL_ASSEMBLY));
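  /* assemble B = P^T a P, the locally owned diagonal block of A seen in the overlapping
     numbering, stored with the same nonzero pattern as aux */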
  PetscCall(MatPtAP(a,P,MAT_INITIAL_MATRIX,1.0,&X));
  PetscCall(MatDestroy(&P));
  PetscCall(MatAXPY(B,1.0,X,SUBSET_NONZERO_PATTERN));
  PetscCall(MatDestroy(&X));
  PetscCall(MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE));
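  /* concurrent generalized eigenproblem aux x = lambda B x, solved on each process with
     shift-and-invert around the target 0 (the GenEO-style eigenproblem that PCHPDDM would
     otherwise set up itself) */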
  PetscCall(EPSCreate(PETSC_COMM_SELF,&eps));
  PetscCall(EPSSetOperators(eps,aux,B));
  PetscCall(EPSSetProblemType(eps,EPS_GHEP));
  PetscCall(EPSSetTarget(eps,0.0));
  PetscCall(EPSSetWhichEigenpairs(eps,EPS_TARGET_MAGNITUDE));
  PetscCall(EPSGetST(eps,&st));
  PetscCall(STSetType(st,STSINVERT));
  PetscCall(EPSSetFromOptions(eps));
  PetscCall(EPSGetDimensions(eps,&nev,NULL,NULL));
  PetscCall(EPSSolve(eps));
  PetscCall(EPSGetConverged(eps,&nconv));
  nev = PetscMin(nev,nconv);
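  /* gather the converged eigenvectors column by column into a dense Mat that will serve as
     the deflation space; nev is capped by nconv in case fewer eigenpairs converged */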
  PetscCall(ISGetLocalSize(is,&m));
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF,m,nev,NULL,&P));
  for (m = 0; m < nev; ++m) {
    PetscCall(MatDenseGetColumnVecWrite(P,m,&b));
    PetscCall(EPSGetEigenvector(eps,m,b,NULL));
    PetscCall(MatDenseRestoreColumnVecWrite(P,m,&b));
  }
  PetscCall(EPSDestroy(&eps));
  PetscCall(MatDestroy(&B));
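  /* supply the user-built deflation space to PCHPDDM, in the subdomain numbering given by is;
     the PC keeps its own reference, so the local objects can be destroyed right away */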
  PetscCall(PCHPDDMSetDeflationMat(pc,is,P));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&aux));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(ISDestroy(&is));
  PetscCall(MatCreateVecs(A,NULL,&b));
  PetscCall(VecSet(b,1.0));
  PetscCall(KSPSolve(ksp,b,b));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc,PCHPDDM,&flg));
  if (flg) {
    PetscCall(PCHPDDMGetSTShareSubKSP(pc,&flg));
    /* since EPSSolve() is called outside PCSetUp_HPDDM() and there is no mechanism (yet) to pass the underlying ST, */
    /* PCHPDDMGetSTShareSubKSP() should not return PETSC_TRUE when using PCHPDDMSetDeflationMat() */
    PetscCheck(!flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCHPDDMGetSTShareSubKSP() returned PETSC_TRUE");
  }
  PetscCall(VecDestroy(&b));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(SlepcFinalize());
  return 0;
}
/*TEST

   build:
      requires: hpddm

   test:
      requires: double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES)
      nsize: 4
      filter: grep -v " total: nonzeros=" | grep -v " rows=" | grep -v " factor fill ratio given " | grep -v " using I-node" | sed -e "s/Linear solve converged due to CONVERGED_RTOL iterations 5/Linear solve converged due to CONVERGED_RTOL iterations 4/g" -e "s/amount of overlap = 1/user-defined overlap/g"
      # similar output as ex76 with -pc_hpddm_levels_1_eps_nev val instead of just -eps_nev val
      args: -ksp_rtol 1e-3 -ksp_max_it 10 -ksp_error_if_not_converged -ksp_converged_reason -ksp_view -ksp_type hpddm -pc_type hpddm -pc_hpddm_define_subdomains {{false true}shared output} -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_st_share_sub_ksp -eps_nev 10 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -matload_block_size 2

TEST*/
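
A minimal sketch of how this example might be launched by hand, assuming the HPDDM test data
from the PETSc datafiles repository is available and DATAFILESPATH points at it (the option
names are taken from the args line above; drop or adjust them as needed):

   mpiexec -n 4 ./test43 -ksp_converged_reason -ksp_type hpddm -pc_type hpddm -pc_hpddm_levels_1_pc_type asm -pc_hpddm_levels_1_sub_pc_type lu -eps_nev 10 -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -matload_block_size 2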