Actual source code: ex61f.F90

petsc-3.9.1 2018-04-29
!
!        Demonstrates having each OpenMP thread manage its own PETSc objects and linear solves
!           - each thread is ONLY allowed to access objects that IT created,
!             that is, threads cannot share objects
!
!        Run with, for example, "export OMP_NUM_THREADS=16; ./ex61f"
!           to use 16 independent threads
!
!        ./configure --with-threadsafety --with-log=0 --with-openmp
!
         module omp_module
         implicit none
         contains
         subroutine split_indices(total,num_pieces,ibeg,iend)
           implicit none
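!          split 1..total into num_pieces contiguous, nearly equal ranges;
!          for example, total=10 and num_pieces=3 gives (1:4), (5:7), (8:10)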

           integer, intent(in) :: total
           integer, intent(in) :: num_pieces
           integer, intent(out) :: ibeg(num_pieces), iend(num_pieces)
           integer :: itmp1, itmp2, ioffset, i

           itmp1 = total/num_pieces
           itmp2 = mod(total,num_pieces)
           ioffset = 0
           do i=1,itmp2
              ibeg(i) = ioffset + 1
              iend(i) = ioffset + (itmp1+1)
              ioffset = iend(i)
           enddo
           do i=itmp2+1,num_pieces
              ibeg(i) = ioffset + 1
              if (ibeg(i) > total) then
                 iend(i) = ibeg(i) - 1
              else
                 iend(i) = ioffset + itmp1
                 ioffset = iend(i)
              endif
           enddo

         end subroutine split_indices
         end module omp_module

      module assert_mod
      implicit none
      contains
      subroutine assert(lcond,msg,icase)
      logical, intent(in) :: lcond
      character(len=*), intent(in) :: msg
      integer, intent(in) :: icase

      if (.not.lcond) then
         write(*,*) msg, icase
         stop 'assertion error'
      endif
      return
      end subroutine assert
      end module assert_mod

      program tpetsc
!/*T
!   TODO: Need to determine if deprecated
!T*/

#include <petsc/finclude/petsc.h>
      use assert_mod
      use omp_module
      use petsc
      implicit none
!     ----------------------------
!     test concurrent petsc solver
!     ----------------------------

      integer, parameter :: NCASES=100
      integer, parameter :: MAXTHREADS=64
      real(8), parameter :: tol = 1.0d-6

      integer, dimension(MAXTHREADS) :: ibeg,iend

!$   integer, external :: omp_get_num_threads

      Mat, target :: Mcol_f_mat(MAXTHREADS)
      Vec, target :: Mcol_f_vecb(MAXTHREADS)
      Vec, target :: Mcol_f_vecx(MAXTHREADS)
      KSP, target :: Mcol_f_ksp(MAXTHREADS)
      PC,  target :: MPC(MAXTHREADS)

      Mat, pointer :: col_f_mat
      Vec, pointer :: col_f_vecb
      Vec, pointer :: col_f_vecx
      KSP, pointer :: col_f_ksp
      PC, pointer :: pc

      PetscInt :: ith, nthreads
      PetscErrorCode ierr

      integer, parameter :: nz_per_row = 9
      integer, parameter :: n = 100
      integer :: i,j,ij,ij2,ii,jj,nz,ip, dx,dy,icase
      integer, dimension(n*n*nz_per_row) :: ilist,jlist
      PetscScalar :: aij
      PetscScalar, dimension(n*n*nz_per_row) :: alist
      logical :: isvalid_ii, isvalid_jj, is_diag

      PetscInt nrow
      PetscInt ncol
      PetscScalar, dimension(0:(n*n-1)) :: x, b
      real(8) :: err(NCASES)

      integer :: t1,t2,count_rate
      real :: ttime

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif

      nrow = n*n
      ncol = nrow

      nthreads = 1
!$omp parallel
!$omp master
!$      nthreads = omp_get_num_threads()
!$omp end master
!$omp end parallel
      print*,'nthreads = ', nthreads,' NCASES = ', NCASES

      call split_indices(NCASES,nthreads,ibeg,iend)
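!     thread ith will solve the linear systems for cases ibeg(ith)..iend(ith)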


!$omp parallel do                                                        &
!$omp& private(ith,ierr)                                                 &
!$omp& private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
      do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)
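
         ! each thread creates its own matrix, vectors, and solver on
         ! PETSC_COMM_SELF; nz_per_row preallocates room for the 9-point
         ! stencil assembled below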


         call MatCreateSeqAIJ(PETSC_COMM_SELF,nrow,ncol,nz_per_row,PETSC_NULL_INTEGER,col_f_mat,ierr)
         call assert(ierr.eq.0,'matcreateseqaij return ',ierr)

         call VecCreateSeqWithArray(PETSC_COMM_SELF,1,nrow,PETSC_NULL_SCALAR,col_f_vecb,ierr)
         call assert(ierr.eq.0,'veccreateseqwitharray return ierr',ierr)

         call VecDuplicate(col_f_vecb,col_f_vecx,ierr)
         call assert(ierr.eq.0,'vecduplicate return ierr',ierr)

         call KSPCreate(PETSC_COMM_SELF,col_f_ksp,ierr)
         call assert(ierr.eq.0,'kspcreate return ierr',ierr)

      enddo

!      ---------------------------
!      set up the sparsity pattern
!      ---------------------------
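!      (a 9-point stencil on the n x n grid: each valid grid point couples
!       to itself with value 4.0 and to each of its up-to-8 neighbours
!       with -1.0)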
       nz = 0
       do j=1,n
       do i=1,n
        ij = i + (j-1)*n
        do dx=-1,1
        do dy=-1,1
          ii = i + dx
          jj = j + dy

          ij2 = ii + (jj-1)*n
          isvalid_ii = (1 <= ii).and.(ii <= n)
          isvalid_jj = (1 <= jj).and.(jj <= n)
          if (isvalid_ii.and.isvalid_jj) then
             is_diag = (ij .eq. ij2)
             nz = nz + 1
             ilist(nz) = ij
             jlist(nz) = ij2
             if (is_diag) then
               aij = 4.0
             else
               aij = -1.0
             endif
             alist(nz) = aij
          endif
        enddo
        enddo
       enddo
       enddo

       print*,'nz = ', nz

!      ------------------------------------------------------
!      convert from Fortran (1-based) to C (0-based) indexing
!      ------------------------------------------------------
       ilist(1:nz) = ilist(1:nz) - 1
       jlist(1:nz) = jlist(1:nz) - 1


!      -------------------------
!      set up matrices and solve
!      -------------------------
       call system_clock(t1,count_rate)

!$omp parallel do                                                       &
!$omp& private(ith,icase,ip,i,j,ii,jj,aij,ierr,x,b)                     &
!$omp& private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp,pc)
       do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)
         pc => MPC(ith)
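
         ! every case re-assembles the matrix and re-solves it, so the full
         ! PETSc setup/solve path runs concurrently on all threads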

        do icase=ibeg(ith),iend(ith)

         do ip=1,nz
           ii = ilist(ip)
           jj = jlist(ip)
           aij = alist(ip)
           call MatSetValue(col_f_mat,ii,jj,aij,INSERT_VALUES,ierr)
           call assert(ierr.eq.0,'matsetvalue return ierr',ierr)
         enddo
         call MatAssemblyBegin(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
         call assert(ierr.eq.0,'matassemblybegin return ierr',ierr)

         call MatAssemblyEnd(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
         call assert(ierr.eq.0,'matassemblyend return ierr',ierr)

         call KSPSetOperators(col_f_ksp,col_f_mat,col_f_mat,ierr)
         call assert(ierr.eq.0,'KSPSetOperators return ierr',ierr)

         ! select the solver: use a direct LU factorization as the preconditioner
         call KSPGetPC(col_f_ksp,pc,ierr)
         call assert(ierr.eq.0,'KSPGetPC return ierr ', ierr)

         call PCSetType(pc,PCLU,ierr)
         call assert(ierr.eq.0,'PCSetType return ierr ',ierr)
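
         ! note: with an exact LU factorization the Krylov method should
         ! converge in a single iteration, so err below is expected to be
         ! near machine roundoff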

         ! associate petsc vector with allocated array
         x(:) = 1
!$omp    critical
         call VecPlaceArray(col_f_vecx,x,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecPlaceArray col_f_vecx return ',ierr)
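
         ! the VecPlaceArray/VecResetArray calls are wrapped in critical
         ! sections, presumably because not every PETSc call is guaranteed
         ! thread-safe even when configured --with-threadsafety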

         b(:) = 0
         do ip=1,nz
           i = ilist(ip)
           j = jlist(ip)
           aij = alist(ip)
           b(i) = b(i) + aij*x(j)
         enddo
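
         ! b = A*x with x = 1, so the exact solution of the system solved
         ! below is the vector of all ones; err(icase) measures the
         ! deviation from it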
         x = 0
!$omp    critical
         call VecPlaceArray(col_f_vecb,b,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecPlaceArray col_f_vecb return ',ierr)



!  -------------------------------------------------------------------
!  the key test: solve multiple independent linear systems in parallel
!  -------------------------------------------------------------------
         call KSPSetFromOptions(col_f_ksp,ierr)
         call assert(ierr.eq.0,'KSPSetFromOptions return ierr ',ierr)

         call KSPSetUp(col_f_ksp,ierr)
         call assert(ierr.eq.0,'KSPSetup return ierr ',ierr)


         call KSPSolve(col_f_ksp,col_f_vecb,col_f_vecx,ierr)
         call assert(ierr.eq.0,'KSPSolve return ierr ',ierr)


!        ------------
!        check answer
!        ------------
         err(icase) = maxval(abs(x(:)-1))

!$omp    critical
         call VecResetArray(col_f_vecx,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecResetArray return ierr ',ierr)

!$omp    critical
         call VecResetArray(col_f_vecb,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecResetArray return ierr ',ierr)
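
         ! VecResetArray detaches the thread-private x and b arrays so they
         ! can be reused safely in the next case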

        enddo
       enddo

!$omp parallel do                                                        &
!$omp& private(ith,ierr)                                                 &
!$omp& private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
      do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)
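
         ! each thread destroys only the objects it created itself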


         call MatDestroy(col_f_mat,ierr)
         call assert(ierr.eq.0,'matdestroy return ',ierr)

         call VecDestroy(col_f_vecb,ierr)
         call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

         call VecDestroy(col_f_vecx,ierr)
         call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

         call KSPDestroy(col_f_ksp,ierr)
         call assert(ierr.eq.0,'kspdestroy return ierr',ierr)

      enddo

       call system_clock(t2,count_rate)
       ttime = real(t2-t1)/real(count_rate)
       write(*,*) 'total time is ',ttime

       write(*,*) 'maxval(err) ', maxval(err)
       do icase=1,NCASES
        if (err(icase) > tol) then
         write(*,*) 'icase,err(icase) ',icase,err(icase)
        endif
       enddo

       call PetscFinalize(ierr)
       call assert(ierr.eq.0,'petscfinalize return ierr',ierr)

       end program tpetsc

!/*TEST
!
!   build:
!      requires: double !complex !define(PETSC_USE_64BIT_INDICES)
!
!   test:
!      output_file: output/ex61f_1.out
!      TODO: Need to determine how to test OpenMP code
!
!TEST*/