Actual source code: dsghep.c

slepc-3.10.1 2018-10-23
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2018, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/dsimpl.h>
#include <slepcblaslapack.h>

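/* Allocate the matrices A, B and Q, plus the permutation array used when sorting eigenvalues */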
PetscErrorCode DSAllocate_GHEP(DS ds,PetscInt ld)
{
  DSAllocateMat_Private(ds,DS_MAT_A);
  DSAllocateMat_Private(ds,DS_MAT_B);
  DSAllocateMat_Private(ds,DS_MAT_Q);
  PetscFree(ds->perm);
  PetscMalloc1(ld,&ds->perm);
  PetscLogObjectMemory((PetscObject)ds,ld*sizeof(PetscInt));
  return(0);
}

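/* Print the stored matrices: A and B always, Q once the problem has been reduced to condensed form, and X if eigenvectors have been computed */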
PetscErrorCode DSView_GHEP(DS ds,PetscViewer viewer)
{
  DSViewMat(ds,viewer,DS_MAT_A);
  DSViewMat(ds,viewer,DS_MAT_B);
  if (ds->state>DS_STATE_INTERMEDIATE) {
    DSViewMat(ds,viewer,DS_MAT_Q);
  }
  if (ds->mat[DS_MAT_X]) {
    DSViewMat(ds,viewer,DS_MAT_X);
  }
  return(0);
}

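/* Return eigenvectors: once the problem is in condensed (solved) state they are the columns of Q, otherwise coordinate unit vectors; residual norm estimates are not supported */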
PetscErrorCode DSVectors_GHEP(DS ds,DSMatType mat,PetscInt *j,PetscReal *rnorm)
{
  PetscScalar    *Q = ds->mat[DS_MAT_Q];
  PetscInt       ld = ds->ld,i;

  if (rnorm) SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
  switch (mat) {
    case DS_MAT_X:
    case DS_MAT_Y:
      if (j) {
        if (ds->state>=DS_STATE_CONDENSED) {
          PetscMemcpy(ds->mat[mat]+(*j)*ld,Q+(*j)*ld,ld*sizeof(PetscScalar));
        } else {
          PetscMemzero(ds->mat[mat]+(*j)*ld,ld*sizeof(PetscScalar));
          *(ds->mat[mat]+(*j)+(*j)*ld) = 1.0;
        }
      } else {
        if (ds->state>=DS_STATE_CONDENSED) {
          PetscMemcpy(ds->mat[mat],Q,ld*ld*sizeof(PetscScalar));
        } else {
          PetscMemzero(ds->mat[mat],ld*ld*sizeof(PetscScalar));
          for (i=0;i<ds->n;i++) *(ds->mat[mat]+i+i*ld) = 1.0;
        }
      }
      break;
    case DS_MAT_U:
    case DS_MAT_VT:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_SUP,"Not implemented yet");
      break;
    default:
      SETERRQ(PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Invalid mat parameter");
  }
  return(0);
}

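/* Sort the eigenvalues stored on the diagonal of A according to the sorting criterion of the DS, permuting the columns of Q consistently */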
PetscErrorCode DSSort_GHEP(DS ds,PetscScalar *wr,PetscScalar *wi,PetscScalar *rr,PetscScalar *ri,PetscInt *k)
{
  PetscInt       n,l,i,*perm,ld=ds->ld;
  PetscScalar    *A;

  if (!ds->sc) return(0);
  n = ds->n;
  l = ds->l;
  A  = ds->mat[DS_MAT_A];
  perm = ds->perm;
  for (i=l;i<n;i++) wr[i] = A[i+i*ld];
  if (rr) {
    DSSortEigenvalues_Private(ds,rr,ri,perm,PETSC_FALSE);
  } else {
    DSSortEigenvalues_Private(ds,wr,NULL,perm,PETSC_FALSE);
  }
  for (i=l;i<n;i++) A[i+i*ld] = wr[perm[i]];
  for (i=l;i<n;i++) wr[i] = A[i+i*ld];
  DSPermuteColumns_Private(ds,l,n,DS_MAT_Q,perm);
  return(0);
}

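/* Solve the dense generalized Hermitian eigenproblem with the LAPACK divide-and-conquer routine SYGVD; on exit the eigenvectors are stored in Q, the diagonal of A holds the eigenvalues, and B is reset to the identity */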
PetscErrorCode DSSolve_GHEP(DS ds,PetscScalar *wr,PetscScalar *wi)
{
#if defined(SLEPC_MISSING_LAPACK_SYGVD)
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SYGVD - Lapack routine is unavailable");
#else
  PetscScalar    *work,*A,*B,*Q;
  PetscBLASInt   itype = 1,*iwork,info,n1,liwork,ld,lrwork=0,lwork;
  PetscInt       off,i;
#if defined(PETSC_USE_COMPLEX)
  PetscReal      *rwork,*rr;
#endif

  PetscBLASIntCast(ds->n-ds->l,&n1);
  PetscBLASIntCast(ds->ld,&ld);
  PetscBLASIntCast(5*ds->n+3,&liwork);
#if defined(PETSC_USE_COMPLEX)
  PetscBLASIntCast(ds->n*ds->n+2*ds->n,&lwork);
  PetscBLASIntCast(2*ds->n*ds->n+5*ds->n+1+n1,&lrwork);
#else
  PetscBLASIntCast(2*ds->n*ds->n+6*ds->n+1,&lwork);
#endif
  DSAllocateWork_Private(ds,lwork,lrwork,liwork);
  work = ds->work;
  iwork = ds->iwork;
  off = ds->l+ds->l*ld;
  A = ds->mat[DS_MAT_A];
  B = ds->mat[DS_MAT_B];
  Q = ds->mat[DS_MAT_Q];
#if defined(PETSC_USE_COMPLEX)
  rr = ds->rwork;
  rwork = ds->rwork + n1;
  lrwork = ds->lrwork - n1;
  PetscStackCallBLAS("LAPACKsygvd",LAPACKsygvd_(&itype,"V","U",&n1,A+off,&ld,B+off,&ld,rr,work,&lwork,rwork,&lrwork,iwork,&liwork,&info));
  for (i=0;i<n1;i++) wr[ds->l+i] = rr[i];
#else
  PetscStackCallBLAS("LAPACKsygvd",LAPACKsygvd_(&itype,"V","U",&n1,A+off,&ld,B+off,&ld,wr+ds->l,work,&lwork,iwork,&liwork,&info));
#endif
  SlepcCheckLapackInfo("sygvd",info);
  /* sygvd overwrites A with the eigenvectors; copy the active block to Q */
  PetscMemzero(Q+ds->l*ld,n1*ld*sizeof(PetscScalar));
  for (i=ds->l;i<ds->n;i++) {
    PetscMemcpy(Q+ds->l+i*ld,A+ds->l+i*ld,n1*sizeof(PetscScalar));
  }
  /* reset A to a diagonal matrix holding the eigenvalues and B to the identity */
  PetscMemzero(B+ds->l*ld,n1*ld*sizeof(PetscScalar));
  PetscMemzero(A+ds->l*ld,n1*ld*sizeof(PetscScalar));
  for (i=ds->l;i<ds->n;i++) {
    if (wi) wi[i] = 0.0;
    B[i+i*ld] = 1.0;
    A[i+i*ld] = wr[i];
  }
  return(0);
#endif
}

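/* Make the data consistent across MPI processes: the first rank packs A, B (and Q if available) together with the eigenvalue arrays, broadcasts the buffer, and the remaining ranks unpack it */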
PetscErrorCode DSSynchronize_GHEP(DS ds,PetscScalar eigr[],PetscScalar eigi[])
{
  PetscInt       ld=ds->ld,l=ds->l,k;
  PetscMPIInt    n,rank,off=0,size,ldn;

  /* size of the message: active columns of A and B, optionally Q, plus the eigenvalue arrays */
  k = 2*(ds->n-l)*ld;
  if (ds->state>DS_STATE_RAW) k += (ds->n-l)*ld;
  if (eigr) k += (ds->n-l);
  if (eigi) k += (ds->n-l);
  DSAllocateWork_Private(ds,k,0,0);
  PetscMPIIntCast(k*sizeof(PetscScalar),&size);
  PetscMPIIntCast(ds->n-l,&n);
  PetscMPIIntCast(ld*(ds->n-l),&ldn);
  MPI_Comm_rank(PetscObjectComm((PetscObject)ds),&rank);
  if (!rank) {
    MPI_Pack(ds->mat[DS_MAT_A]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    MPI_Pack(ds->mat[DS_MAT_B]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    if (ds->state>DS_STATE_RAW) {
      MPI_Pack(ds->mat[DS_MAT_Q]+l*ld,ldn,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
    if (eigr) {
      MPI_Pack(eigr+l,n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
    if (eigi) {
      MPI_Pack(eigi+l,n,MPIU_SCALAR,ds->work,size,&off,PetscObjectComm((PetscObject)ds));
    }
  }
  MPI_Bcast(ds->work,size,MPI_BYTE,0,PetscObjectComm((PetscObject)ds));
  if (rank) {
    MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_A]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_B]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    if (ds->state>DS_STATE_RAW) {
      MPI_Unpack(ds->work,size,&off,ds->mat[DS_MAT_Q]+l*ld,ldn,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
    if (eigr) {
      MPI_Unpack(ds->work,size,&off,eigr+l,n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
    if (eigi) {
      MPI_Unpack(ds->work,size,&off,eigi+l,n,MPIU_SCALAR,PetscObjectComm((PetscObject)ds));
    }
  }
  return(0);
}

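/* Report which of the stored matrices are Hermitian: only A and B are */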
PetscErrorCode DSHermitian_GHEP(DS ds,DSMatType m,PetscBool *flg)
{
  if (m==DS_MAT_A || m==DS_MAT_B) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  return(0);
}

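/* Register the GHEP-specific implementations of the DS operations */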
PETSC_EXTERN PetscErrorCode DSCreate_GHEP(DS ds)
{
  ds->ops->allocate      = DSAllocate_GHEP;
  ds->ops->view          = DSView_GHEP;
  ds->ops->vectors       = DSVectors_GHEP;
  ds->ops->solve[0]      = DSSolve_GHEP;
  ds->ops->sort          = DSSort_GHEP;
  ds->ops->synchronize   = DSSynchronize_GHEP;
  ds->ops->hermitian     = DSHermitian_GHEP;
  return(0);
}