Actual source code: mpicusp.cu

petsc-3.4.2 2013-07-02
/*
   This file contains routines for parallel vector operations.
 */
#include <petscconf.h>
PETSC_CUDA_EXTERN_C_BEGIN
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
PETSC_CUDA_EXTERN_C_END
#include <../src/vec/vec/impls/seq/seqcusp/cuspvecimpl.h>
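/* Note (editorial, an assumption about the build): this file is compiled as
   CUDA/C++, so PETSC_CUDA_EXTERN_C_BEGIN/END give the PETSc C headers C
   linkage, while the CUSP implementation header below is C++. */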

PetscErrorCode VecDestroy_MPICUSP(Vec v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  try {
    if (v->spptr) {
      delete ((Vec_CUSP*)v->spptr)->GPUarray;
      delete (Vec_CUSP*)v->spptr;
    }
  } catch(char *ex) {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"CUSP error: %s",ex);
  }
  ierr = VecDestroy_MPI(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

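/* Norms: compute the norm of the local, GPU-resident part with VecNorm_SeqCUSP,
   then combine the per-rank results with MPI_Allreduce.  For NORM_2 the local
   norms must be squared before the global sum and the square root taken after. */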
PetscErrorCode VecNorm_MPICUSP(Vec xin,NormType type,PetscReal *z)
{
  PetscReal      sum,work = 0.0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (type == NORM_2 || type == NORM_FROBENIUS) {
    ierr  = VecNorm_SeqCUSP(xin,NORM_2,&work);CHKERRQ(ierr);
    work *= work;
    ierr  = MPI_Allreduce(&work,&sum,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z    = PetscSqrtReal(sum);
  } else if (type == NORM_1) {
    /* Find the local part */
    ierr = VecNorm_SeqCUSP(xin,NORM_1,&work);CHKERRQ(ierr);
    /* Find the global sum */
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_INFINITY) {
    /* Find the local max */
    ierr = VecNorm_SeqCUSP(xin,NORM_INFINITY,&work);CHKERRQ(ierr);
    /* Find the global max */
    ierr = MPI_Allreduce(&work,z,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else if (type == NORM_1_AND_2) {
    PetscReal temp[2];
    ierr    = VecNorm_SeqCUSP(xin,NORM_1,temp);CHKERRQ(ierr);
    ierr    = VecNorm_SeqCUSP(xin,NORM_2,temp+1);CHKERRQ(ierr);
    temp[1] = temp[1]*temp[1];
    ierr    = MPI_Allreduce(temp,z,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    z[1]    = PetscSqrtReal(z[1]);
  }
  PetscFunctionReturn(0);
}

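/* Dot products follow the same pattern: a local GPU reduction via the SeqCUSP
   kernels, followed by a global MPIU_SUM.  VecTDot is the unconjugated
   ("indefinite") variant, which differs from VecDot only for complex scalars. */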
PetscErrorCode VecDot_MPICUSP(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_SeqCUSP(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPICUSP(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_SeqCUSP(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

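/* Multiple dot products: a 128-entry stack buffer avoids a heap allocation in
   the common case, and all nv local results are combined in one MPI_Allreduce. */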
PetscErrorCode VecMDot_MPICUSP(Vec xin,PetscInt nv,const Vec y[],PetscScalar *z)
{
  PetscScalar    awork[128],*work = awork;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (nv > 128) {
    ierr = PetscMalloc(nv*sizeof(PetscScalar),&work);CHKERRQ(ierr);
  }
  ierr = VecMDot_SeqCUSP(xin,nv,y,work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(work,z,nv,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  if (nv > 128) {
    ierr = PetscFree(work);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
   VECMPICUSP - VECMPICUSP = "mpicusp" - The basic parallel vector, modified to use CUSP

   Options Database Keys:
. -vec_type mpicusp - sets the vector type to VECMPICUSP during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
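/* A usage sketch (an editorial illustration, not part of the original file):
   creating a VECMPICUSP vector explicitly.  The global size 100 is arbitrary.

       Vec            x;
       PetscErrorCode ierr;

       ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
       ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
       ierr = VecSetType(x,VECMPICUSP);CHKERRQ(ierr);   /* or VecSetFromOptions() with -vec_type mpicusp */
       ierr = VecSet(x,1.0);CHKERRQ(ierr);
       ierr = VecDestroy(&x);CHKERRQ(ierr);
*/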


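/* Duplication copies the layout and the full ops table from the parent,
   recreates the ghosted local representation (sharing the parent's update
   scatter), and inherits stashing behavior and block sizes. */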
PetscErrorCode VecDuplicate_MPICUSP(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* change type_name appropriately */
  ierr = PetscObjectChangeTypeName((PetscObject)(*v),VECMPICUSP);CHKERRQ(ierr);

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

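/* Computes the dot product of s and t together with the squared norm of t, so
   only one global reduction is needed; solvers that want both quantities
   (e.g. BiCGStab variants) use this to reduce synchronization. */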
PetscErrorCode VecDotNorm2_MPICUSP(Vec s,Vec t,PetscScalar *dp,PetscScalar *nm)
{
  PetscScalar    work[2],sum[2];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDotNorm2_SeqCUSP(s,t,work,work+1);CHKERRQ(ierr);
  ierr = MPI_Allreduce(work,sum,2,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)s));CHKERRQ(ierr);
  *dp  = sum[0];
  *nm  = sum[1];
  PetscFunctionReturn(0);
}

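/* Constructor for VECMPICUSP: build a standard MPI vector, then overwrite the
   operations that can run on the GPU with their SeqCUSP implementations
   (applied to the local part) and the MPI wrappers defined above. */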
PETSC_EXTERN PetscErrorCode VecCreate_MPICUSP(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,0);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)vv,VECMPICUSP);CHKERRQ(ierr);

  vv->ops->dotnorm2        = VecDotNorm2_MPICUSP;
  vv->ops->waxpy           = VecWAXPY_SeqCUSP;
  vv->ops->duplicate       = VecDuplicate_MPICUSP;
  vv->ops->dot             = VecDot_MPICUSP;
  vv->ops->mdot            = VecMDot_MPICUSP;
  vv->ops->tdot            = VecTDot_MPICUSP;
  vv->ops->norm            = VecNorm_MPICUSP;
  vv->ops->scale           = VecScale_SeqCUSP;
  vv->ops->copy            = VecCopy_SeqCUSP;
  vv->ops->set             = VecSet_SeqCUSP;
  vv->ops->swap            = VecSwap_SeqCUSP;
  vv->ops->axpy            = VecAXPY_SeqCUSP;
  vv->ops->axpby           = VecAXPBY_SeqCUSP;
  vv->ops->maxpy           = VecMAXPY_SeqCUSP;
  vv->ops->aypx            = VecAYPX_SeqCUSP;
  vv->ops->axpbypcz        = VecAXPBYPCZ_SeqCUSP;
  vv->ops->pointwisemult   = VecPointwiseMult_SeqCUSP;
  vv->ops->setrandom       = VecSetRandom_SeqCUSP;
  vv->ops->replacearray    = VecReplaceArray_SeqCUSP;
  vv->ops->dot_local       = VecDot_SeqCUSP;
  vv->ops->tdot_local      = VecTDot_SeqCUSP;
  vv->ops->norm_local      = VecNorm_SeqCUSP;
  vv->ops->mdot_local      = VecMDot_SeqCUSP;
  vv->ops->destroy         = VecDestroy_MPICUSP;
  vv->ops->pointwisedivide = VecPointwiseDivide_SeqCUSP;
  /* place array?
     reset array?
     get values?
  */
  ierr = VecCUSPAllocateCheck(vv);CHKERRCUSP(ierr);
  vv->valid_GPU_array = PETSC_CUSP_GPU;
  ierr = VecSet(vv,0.0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

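/* VECCUSP is a convenience type: on a single-process communicator it becomes
   VECSEQCUSP, otherwise VECMPICUSP. */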
PETSC_EXTERN PetscErrorCode VecCreate_CUSP(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQCUSP);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPICUSP);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
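/* A runtime sketch (editorial, not from this file): a PETSc program that calls
   VecSetFromOptions() can select these types from the command line.  The
   executable name "./app" and process count are illustrative.

       mpiexec -n 2 ./app -vec_type cusp      (dispatches to VECMPICUSP here)
       ./app -vec_type seqcusp                (sequential CUSP vector)
*/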