!----- AGPL --------------------------------------------------------------------
!
! Copyright (C) Stichting Deltares, 2015.
!
! This file is part of Delft3D (D-Flow Flexible Mesh component).
!
! Delft3D is free software: you can redistribute it and/or modify
! it under the terms of the GNU Affero General Public License as
! published by the Free Software Foundation version 3.
!
! Delft3D is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU Affero General Public License for more details.
!
! You should have received a copy of the GNU Affero General Public License
! along with Delft3D. If not, see <http://www.gnu.org/licenses/>.
!
! contact: delft3d.support@deltares.nl
! Stichting Deltares
! P.O. Box 177
! 2600 MH Delft, The Netherlands
!
! All indications and logos of, and references to, "Delft3D",
! "D-Flow Flexible Mesh" and "Deltares" are registered trademarks of Stichting
! Deltares, and remain the property of Stichting Deltares. All rights reserved.
!
!-------------------------------------------------------------------------------
! $Id: solve_petsc.F90 42642 2015-10-21 11:34:20Z dam_ar $
! $HeadURL: https://repos.deltares.nl/repos/ds/trunk/additional/unstruc/src/solve_petsc.F90 $
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
module m_petsc
#include
use petsc
integer :: numrows ! number of rows in this domain
integer :: numallrows ! number of rows of whole system
integer, dimension(:), allocatable :: rowtoelem ! local row to local element list, dim(numrows)
! CRS matrices for PETSc/MatCreateMPIAIJWithSplitArrays
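! The matrix is stored in two CRS blocks, as expected by PETSc's MatCreateMPIAIJWithSplitArrays:
! the diagonal block holds couplings between cells that are both owned by this domain,
! the off-diagonal block holds couplings with cells owned by other domains (ghost cells).
! For a single-domain run the off-diagonal block is typically empty.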
integer :: numdia ! number of non-zero entries in diagonal block
double precision, dimension(:), allocatable :: adia ! non-zero matrix entries, diagonal block
integer, dimension(:), allocatable :: idia, jdia ! row pointers and column indices of diagonal block
integer :: numoff ! number of non-zero entries in off-diagonal block
double precision, dimension(:), allocatable :: aoff ! non-zero matrix entries, off-diagonal block
integer, dimension(:), allocatable :: ioff, joff ! row pointers and column indices of off-diagonal block
integer, dimension(:), allocatable :: joffsav ! store of joff
integer, dimension(:), allocatable :: guusidxdia ! index in ccr or bbr array, >0: ccr, <0: bbr, diagonal block, dim(numdia)
integer, dimension(:), allocatable :: guusidxoff ! index in ccr or bbr array, >0: ccr, <0: bbr, off-diagonal block, dim(numoff)
integer :: numzerorows ! number of zero rows
integer, dimension(:), allocatable :: izerorow ! zero-rows in matrix (kfs=0)
double precision, dimension(:), allocatable :: rhs_val ! values in vector rhs
double precision, dimension(:), allocatable :: sol_val ! values in vector sol
double precision, dimension(:), allocatable :: res_val ! values in vector res
Vec :: res ! residual vector
Vec :: rhs ! right-hand side vector
Vec :: sol ! solution vector
Mat :: Amat ! PETSc-type matrix (rows of dry nodes are kept; their diagonal is set to 1)
KSP :: Solver ! Solver for the equation Amat * sol = rhs
! preconditioner
PC :: Preconditioner
KSP :: SubSolver
PC :: SubPrec
PCType :: PreconditioningType
PetscErrorCode, parameter :: PETSC_OK = 0
end module m_petsc
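! Overview (sketch, based on the routines in this file; the actual call sites live elsewhere):
!   startpetsc             - initialize the PETSc library (once)
!   ini_petsc              - build the CRS sparsity pattern and the PETSc vectors
!   preparePETSCsolver     - create the PETSc matrix and the CG solver
!   conjugategradientPETSC - fill the matrix and right-hand side and solve the system
!   stoppetsc              - destroy the solver and finalize PETSc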
!> initialize PETSc
subroutine startpetsc()
#ifdef HAVE_PETSC
use m_petsc
implicit none
PetscErrorCode :: ierr = PETSC_OK
call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
call PetscLogBegin(ierr)
#endif
return
end subroutine startpetsc
!> finalize PETSc
subroutine stoppetsc()
#ifdef HAVE_PETSC
use m_petsc
use m_flowparameters, only: Icgsolver
implicit none
PetscErrorCode :: ierr = PETSC_OK
if (Icgsolver.eq.6) call killSolverPETSC()
call PetscFinalize(ierr)
#endif
return
end subroutine stoppetsc
!> allocate arrays for PETSc matrix construction,
!> and get sparsity pattern in CRS format
subroutine ini_petsc(Ndx,Ndxi,ierror)
#include
use m_reduce
use m_partitioninfo
use petsc
use m_petsc
use MessageHandling
use m_flowgeom, only: xz, yz, wu, nd
implicit none
integer, intent(in) :: Ndx !< number of cells
integer, intent(in) :: Ndxi !< number of non-boundary cells
integer, intent(out) :: ierror !< error (1) or not (0)
integer, dimension(:), allocatable :: mask
integer, dimension(:), allocatable :: inonzerodia, inonzerooff ! number of nonzeros in diagonal and off-diagonal block, respectively
integer, dimension(:), allocatable :: idx, idum ! for sorting
integer :: istart, iend, num
integer :: i, irow, j, n
integer :: ndn_glob ! global cell number
integer :: ndn_glob_first ! global cell number of first active cell
integer :: L
logical :: Lactive
PetscInt, parameter :: singletonBlocks = 1
PetscErrorCode :: ierr = PETSC_OK
ierror = 1
! make global numbering
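! get_global_numbers is expected to assign contiguous global row numbers per subdomain
! (this is checked below); the number of cells per subdomain is returned in numcells(0:ndomains-1)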
call get_global_numbers(nocg,noel(nogauss+1:nogauss+nocg), iglobal, numcells, 0)
if ( jampi.eq.1 ) then
! the number of cells in this domain
numrows = numcells(my_rank)
! the total number of rows
numallrows = sum(numcells(0:ndomains-1))
else
numrows = nocg
numallrows = nocg
end if
! allocate local variables
allocate(mask(Ndx))
allocate(inonzerodia(numrows))
allocate(inonzerooff(numrows))
! mark active cells
mask = 0
do n=nogauss+1,nogauss+nocg
mask(noel(n)) = 1
end do
! unmark all ghost cells
do i=1,numghost_sall
mask(ighostlist_sall(i)) = 0
end do
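! after unmarking the ghost cells, every masked cell is owned by this domain,
! so each matrix row is assembled by exactly one domain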
! unmark deactivated ghost cells
! open(6666,file='tmp'//sdmn//'.xyz')
! do n=nogauss+1,nogauss+nocg
! ndn = noel(n)
! if ( mask(ndn).eq.1 ) then
!! unmask cell if it is a deactivated ghost cell
! if ( idomain(ndn).ne.my_rank ) then
! Lactive = .false.
! do i=1,nd(ndn)%lnx
! L = iabs(nd(ndn)%ln(i))
! if ( wu(L).ne.0d0 ) then
! Lactive = .true.
! end if
! end do
! if ( .not.Lactive ) then
! mask(ndn) = 0
! write(6,"('disabled ghost cell, my_rank=', I3, ', ndn=', I5)") my_rank, ndn
! write(6666,"(3E17.5)") xz(ndn), yz(ndn), dble(my_rank)
! end if
! end if
! end if
!
!! if ( mask(ndn).eq.1 ) then
!! do i=1,row(ndn)%l
!! j=row(ndn)%j(i)
!!
!! if ( iglobal(j).eq.0 ) then
!! write(6,"('zero global cell number, my_rank=', I3, ', j=', I5)") my_rank, ndn
!! write(6666,"(3E17.5)") xz(j), yz(j), dble(my_rank)
!! end if
!! end do
!! end if
! end do
! close(6666)
! count nonzero elements
irow = 0
ndn_glob_first = 0
numdia = 0
numoff = 0
do n=nogauss+1,nogauss+nocg
ndn = noel(n) ! cell number
if ( mask(ndn).eq.1 ) then ! active cells only
irow = irow+1
ndn_glob = iglobal(ndn) ! global cell number
! check global cell numbering (safety)
if ( ndn_glob_first.eq.0 ) then
ndn_glob_first = ndn_glob
else
if ( ndn_glob.ne.ndn_glob_first+irow-1 ) then
call mess(LEVEL_ERROR, 'ini_petsc: global cell numbering error')
goto 1234
end if
end if
! diagonal element
numdia = numdia+1
! count non-zero row entries for this row
do i=1,row(ndn)%l
j=row(ndn)%j(i)
if ( iglobal(j).eq.0 ) cycle
if ( mask(j).eq.1 ) then ! in diagonal block
numdia = numdia+1
else ! in off-diagonal block
numoff = numoff+1
end if
end do
end if
end do
! allocate module variables
if ( allocated(rowtoelem) ) deallocate(rowtoelem)
if ( allocated(jdia) ) deallocate(jdia)
if ( allocated(idia) ) deallocate(idia)
if ( allocated(adia) ) deallocate(adia)
if ( allocated(joff) ) deallocate(joff)
if ( allocated(ioff) ) deallocate(ioff)
if ( allocated(aoff) ) deallocate(aoff)
if ( allocated(joffsav) ) deallocate(joffsav)
if ( allocated(guusidxdia) ) deallocate(guusidxdia)
if ( allocated(guusidxoff) ) deallocate(guusidxoff)
if ( allocated(izerorow) ) deallocate(izerorow)
if ( allocated(rhs_val) ) deallocate(rhs_val)
if ( allocated(sol_val) ) deallocate(sol_val)
if ( allocated(res_val) ) deallocate(res_val)
allocate(rowtoelem(numrows))
allocate(jdia(numdia))
allocate(idia(numrows+1))
allocate(adia(numdia))
allocate(joff(max(numoff,1)))
allocate(ioff(numrows+1))
allocate(aoff(max(numoff,1)))
allocate(joffsav(max(numoff,1)))
allocate(guusidxdia(numdia))
allocate(guusidxoff(numoff))
allocate(izerorow(numrows))
allocate(rhs_val(1:numrows))
allocate(sol_val(1:numrows))
allocate(res_val(1:numrows))
! make the CRS index arrays
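! sign convention of the guus index arrays: a positive value is an index into ccr
! (off-diagonal coefficient), a negative value -ndn refers to the diagonal coefficient bbr(ndn)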
irow = 0
numdia = 0
numoff = 0
idia = 0
ioff = 0
idia(1) = 1
ioff(1) = 1
guusidxdia = 0
guusidxoff = 0
do n=nogauss+1,nogauss+nocg
ndn=noel(n)
if ( mask(ndn).eq.1 ) then
irow = irow+1 ! local row number
rowtoelem(irow) = ndn
! diagonal element
numdia = numdia+1
jdia(numdia) = iglobal(ndn)
guusidxdia(numdia) = -ndn
if ( iglobal(ndn).eq.0 ) then
write(6,*) '--> iglobal=0', my_rank, ndn
end if
! count non-zero row entries for this row
do i=1,row(ndn)%l
j=row(ndn)%j(i)
if ( iglobal(j).eq.0 ) cycle
if ( mask(j).eq.1 ) then ! in diagonal block
numdia = numdia+1
jdia(numdia) = iglobal(j)
guusidxdia(numdia) = row(ndn)%a(i)
else ! ghost cell: in off-diagonal block
numoff = numoff+1
joff(numoff) = iglobal(j)
guusidxoff(numoff) = row(ndn)%a(i)
end if
end do
! end if
idia(irow+1) = numdia+1
ioff(irow+1) = numoff+1
end if
end do
inonzerodia = idia(2:numrows+1)-idia(1:numrows)
if ( numoff.gt.0 ) then
inonzerooff = ioff(2:numrows+1)-ioff(1:numrows)
else
inonzerooff = 0
end if
! sort the column indices within each row (ascending), keeping guusidxdia/guusidxoff in sync
num = max(maxval(inonzerodia),maxval(inonzerooff))
allocate(idx(num))
allocate(idum(num))
do n=1,numrows
istart = idia(n)
iend = idia(n+1)-1
num = iend-istart+1
if ( num.gt.0 ) then
idum(1:num) = jdia(istart:iend)
call indexxi(num,idum,idx)
jdia(istart:iend) = idum(idx(1:num))
idum(1:num) = guusidxdia(istart:iend)
guusidxdia(istart:iend) = idum(idx(1:num))
end if
end do
do n=1,numrows
istart = ioff(n)
iend = ioff(n+1)-1
num = iend-istart+1
if ( num.gt.0 ) then
idum(1:num) = joff(istart:iend)
call indexxi(num,idum,idx)
joff(istart:iend) = idum(idx(1:num))
idum(1:num) = guusidxoff(istart:iend)
guusidxoff(istart:iend) = idum(idx(1:num))
end if
end do
! make indices zero-based
idia = idia-1
jdia = jdia-1
ioff = ioff-1
joff = joff-1
! column indices of the diagonal block need to be local
if ( jampi.eq.1 ) then
jdia = jdia - iglobal(rowtoelem(1))+1
end if
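! note: for MatCreateMPIAIJWithSplitArrays the column indices of the diagonal block are local,
! whereas those of the off-diagonal block (joff) remain global; both are zero-based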
! store
joffsav = joff
! create vectors
rhs_val = 0d0
sol_val = 0d0
res_val = 0d0
if (ierr == PETSC_OK) call VecCreateMPIWithArray( DFM_COMM_DFMWORLD, singletonBlocks, &
numrows, PETSC_DECIDE, rhs_val, rhs, ierr)
if (ierr == PETSC_OK) call VecCreateMPIWithArray( DFM_COMM_DFMWORLD, singletonBlocks, &
numrows, PETSC_DECIDE, sol_val, sol, ierr)
if (ierr == PETSC_OK) call VecCreateMPIWithArray( DFM_COMM_DFMWORLD, singletonBlocks, &
numrows, PETSC_DECIDE, res_val, res, ierr)
if (ierr == PETSC_OK) call VecAssemblyBegin(rhs,ierr)
if (ierr == PETSC_OK) call VecAssemblyBegin(sol,ierr)
if (ierr == PETSC_OK) call VecAssemblyBegin(res,ierr)
if (ierr == PETSC_OK) call VecAssemblyEnd(rhs,ierr)
if (ierr == PETSC_OK) call VecAssemblyEnd(sol,ierr)
if (ierr == PETSC_OK) call VecAssemblyEnd(res,ierr)
if (ierr == PETSC_OK) ierror = 0
1234 continue
! deallocate local variables
if ( allocated(mask) ) deallocate(mask)
if ( allocated(inonzerodia) ) deallocate(inonzerodia)
if ( allocated(inonzerooff) ) deallocate(inonzerooff)
if ( allocated(idx) ) deallocate(idx)
if ( allocated(idum) ) deallocate(idum)
return
end subroutine ini_petsc
!> fill the entries of the global matrix (adia, aoff) from bbr and ccr
!> it is assumed that the global cell numbers iglobal, dim(Ndx) are available
!> NO GLOBAL RENUMBERING, so the matrix may contain zero rows
subroutine setPETSCmatrixEntries()
#include
use m_reduce
use m_partitioninfo
use m_petsc
use MessageHandling
use m_flowgeom, only: kfs, Ndx
implicit none
integer :: i, n
integer :: ierr
integer :: irow, istart, iend, k, kk
logical :: Lstop
! count zero rows
numzerorows = 0
izerorow = 0
adia = 0d0
aoff = 0d0
Lstop = .false.
! fill matrix entries
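! rows of dry cells (kfs=0) get a 1 on the diagonal and are recorded in izerorow (global, zero-based)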
do n=1,numdia
i = guusidxdia(n)
if ( i.lt.0 ) then ! diagonal entry in diagonal block
if ( kfs(-i).ne.0 ) then ! nonzero row
adia(n) = bbr(-i)
else ! zero row
numzerorows = numzerorows+1
izerorow(numzerorows) = iglobal(-i)-1 ! global row number, zero based
adia(n) = 1d0
end if ! if ( kfs(-i).ne.0 )
else ! off-diagonal entry in diagonal block
adia(n) = ccr(i)
end if
end do
! BEGIN DEBUG
!
! call MPI_barrier(DFM_COMM_DFMWORLD,ierr)
!
! if ( my_rank.eq.1 ) then
! do i=1,numghost_sall
! ndn = ighostlist_sall(i)
! if ( kfs(ndn).ne.0 ) write(6,*) ndn, 'kfs=', kfs(ndn)
! end do
! do i=1,numoff
! if ( joff(i).ne.joffsav(i) ) then
! write(6,*) 'unequal:', i, joff(i), joffsav(i)
! end if
! end do
! end if
!
! call MPI_barrier(DFM_COMM_DFMWORLD,ierr)
! END DEBUG
do irow=1,numrows
istart = ioff(irow)+1 ! ioff is zero-based
iend = ioff(irow+1)
do n=istart,iend
i = guusidxoff(n)
if ( i.le.0 ) then
! should not happen
write(6,*) 'irow=', irow, 'istart=', istart, 'iend=', iend, 'numrows=', numrows, 'n=', n, 'i=', i
call mess(LEVEL_ERROR, 'setPETSCmatrixEntries: numbering error')
else
aoff(n) = ccr(i)
end if
end do
end do
! if ( numzerorows.gt.0 ) then
! call mess(LEVEL_ERROR, 'setPETSCmatrixEntries: zero rows not supported yet')
! call mess(LEVEL_INFO, 'number of nonzero rows:', numrows-numzerorows)
! call matZeroRowsColumns(Amat, numzerorows, izerorow, 0d0, ierr)
! end if
end subroutine setPETSCmatrixEntries
!> create (or re-create) the preconditioner for the PETSc solver
!> it is assumed that the global cell numbers iglobal, dim(Ndx) are available
!> NO GLOBAL RENUMBERING, so the matrix may contain zero rows
subroutine createPETSCPreconditioner(iprecnd)
#include
use petsc
use m_reduce
! use unstruc_messages
use m_partitioninfo
use m_petsc
! use petscksp; use petscdm
use MessageHandling
implicit none
integer, intent(inout) :: iprecnd !< preconditioner type, 0:default, 1: none, 2:incomplete Cholesky, 3:Cholesky, 4:GAMG (doesn't work)
integer :: i, n, jasucces
integer, save :: jafirst = 1
PetscErrorCode :: ierr = PETSC_OK
jasucces = 0
if ( iprecnd.eq.0 ) then
! call mess(LEVEL_INFO, 'default preconditioner')
else if ( iprecnd.eq.1 ) then
! call mess(LEVEL_INFO, 'no preconditioner')
PreconditioningType = PCNONE
else if ( iprecnd.eq.2 ) then
PreconditioningType = PCICC
else if ( iprecnd.eq.3 ) then
PreconditioningType = PCCHOLESKY
else if ( iprecnd.eq.4 ) then ! not supported
PreconditioningType = PCGAMG
else
call mess(LEVEL_ERROR,'createPETSCPreconditioner: unsupported preconditioner')
goto 1234
end if
! Destroy the preconditioner and then create a new one
if (ierr == PETSC_OK) call KSPGetPC(Solver, Preconditioner, ierr)
if ( jafirst.eq.1 ) then
! do not destroy the preconditioner
jafirst = 0
else
if (ierr == PETSC_OK) call PCDestroy(Preconditioner, ierr)
end if
if (ierr == PETSC_OK) call PCCreate(DFM_COMM_DFMWORLD,Preconditioner, ierr)
! if (ierr == PETSC_OK) call PCSetOperators(Preconditioner, Amat, Amat, SAME_PRECONDITIONER,ierr)
if (ierr == PETSC_OK) call PCSetOperators(Preconditioner, Amat, Amat, DIFFERENT_NONZERO_PATTERN, ierr)
if (ierr == PETSC_OK) call KSPSetPC(Solver, Preconditioner, ierr)
! Configure the preconditioner
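! ICC and Cholesky are sequential preconditioners in PETSc; here they are applied per subdomain
! inside an additive Schwarz (PCASM) wrapper with an overlap of 2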
if ( iprecnd.ne.0 ) then
if (PreconditioningType == PCCHOLESKY .or. PreconditioningType == PCICC) then
if (ierr == PETSC_OK) call PCSetType(Preconditioner, PCASM, ierr)
if (ierr == PETSC_OK) call PCASMSetOverlap( Preconditioner, 2, ierr)
if (ierr == PETSC_OK) call KSPSetUp(Solver, ierr)
if (ierr == PETSC_OK) call PCASMGetSubKSP(Preconditioner, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, SubSolver, ierr)
if (ierr == PETSC_OK) call KSPGetPC(SubSolver, SubPrec, ierr)
if (ierr == PETSC_OK) call PCSetType(SubPrec, PreconditioningType, ierr)
else
if (ierr == PETSC_OK) call PCSetType(Preconditioner, PreconditioningType, ierr)
if (ierr == PETSC_OK) call KSPSetUp(Solver, ierr)
end if
end if
if ( ierr .ne. PETSC_OK ) then
call mess(LEVEL_ERROR, 'createPETSCPreconditioner: error')
end if
1234 continue
return
end subroutine createPETSCPreconditioner
!> compose the global matrix and solver for PETSc
!> it is assumed that the global cell numbers iglobal, dim(Ndx) are available
!> NO GLOBAL RENUMBERING, so the matrix may contain zero rows
subroutine preparePETSCsolver()
#include
use petsc
use m_reduce
! use unstruc_messages
use m_partitioninfo
use m_petsc
! use petscksp; use petscdm
implicit none
integer :: i, n, jasucces
PetscErrorCode :: ierr = PETSC_OK
PetscInt, parameter :: maxits = 4000
double precision, parameter :: RelTol = 1d-14
double precision, parameter :: AbsTol = 1d-14
double precision, parameter :: dTol = PETSC_DEFAULT_DOUBLE_PRECISION
jasucces = 0
! Restore joff with stored values
joff = joffsav
! Set ridiculous values so that it will be detected if the correct values are not
! filled in before use
adia = 123.4
aoff = 432.1
! the following will destroy joff
if (ndomains == 1) then
if (ierr == PETSC_OK) call MatCreateSeqAIJWithArrays(DFM_COMM_DFMWORLD, numrows, numrows, &
idia, jdia, adia, Amat, ierr)
else
! do i=0,ndomains-1
! if ( my_rank.eq.i ) then
! write(6,"('my_rank:', i5, ', numrows:', I5, ', numdia:', I5)") my_rank, numrows, numdia
! write(6,"('idia: ', 100000i5)") idia(1:numrows)
! write(6,"('jdia: ', 100000i5)") jdia(1:numrows)
! end if
! call flush(6)
!
! CALL MPI_BARRIER(DFM_COMM_DFMWORLD, I)
! end do
! stop
if (ierr == PETSC_OK) call MatCreateMPIAIJWithSplitArrays(DFM_COMM_DFMWORLD, numrows, numrows, &
PETSC_DETERMINE, PETSC_DETERMINE, idia, jdia, adia, ioff, joff, aoff, Amat, ierr)
end if
if (ierr == PETSC_OK) call MatAssemblyBegin(Amat,MAT_FINAL_ASSEMBLY,ierr)
if (ierr == PETSC_OK) call MatAssemblyEnd(Amat,MAT_FINAL_ASSEMBLY,ierr)
if (ierr /= PETSC_OK) print *,'conjugategradientPETSC: PETSC_ERROR (1)'
if (ierr /= PETSC_OK) go to 1234
! call writemesg('RHS and SOL vector are filled')
if (ierr == PETSC_OK) call KSPCreate(DFM_COMM_DFMWORLD, Solver, ierr)
if (ierr == PETSC_OK) call KSPSetOperators(Solver, Amat, Amat, SAME_NONZERO_PATTERN, ierr)
if (ierr == PETSC_OK) call KSPSetType(Solver, KSPCG, ierr)
! if (ierr == PETSC_OK) call KSPSetType(Solver, KSPGMRES, ierr)
if (ierr == PETSC_OK) call KSPSetInitialGuessNonzero(Solver, PETSC_TRUE, ierr)
if (ierr == PETSC_OK) call KSPSetTolerances(Solver, RelTol, AbsTol, dTol, maxits, ierr)
! Soheil: for imaginary matrix entries use KSPCGSetType(Solver, ... )
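! note: KSPCG assumes a symmetric operator; if that assumption does not hold,
! switch to the (commented-out) KSPGMRES line above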
1234 continue
end subroutine preparePETSCsolver
!> compose the global matrix and solve with PETSc
!> it is assumed that the global cell numbers iglobal, dim(Ndx) are available
!> NO GLOBAL RENUMBERING, so the matrix may contain zero rows
subroutine conjugategradientPETSC(s1,ndx,its,jacompprecond,iprecond)
!#include
use petsc
use m_reduce
! use unstruc_messages
use m_partitioninfo
use m_petsc
! use petscksp; use petscdm
use m_flowgeom, only: kfs
use MessageHandling
implicit none
integer, intent(in) :: ndx !< number of cells
double precision, dimension(Ndx), intent(inout) :: s1 !< solution vector (water levels), dim(Ndx)
integer, intent(out) :: its !< number of solver iterations (-999 on failure)
integer, intent(in) :: jacompprecond !< compute preconditioner (1) or not (0)
integer, intent(in) :: iprecond !< preconditioner type
double precision :: rnorm ! residual norm
integer :: i, n, irank, jasucces
PetscScalar, dimension(1) :: dum
PetscOffset :: idum
integer :: merr
PetscErrorCode :: ierr = PETSC_OK
KSPConvergedReason :: Reason
character(len=100) :: message
jasucces = 0
its = 0
! fill matrix
call setPETSCmatrixEntries()
if ( jacompprecond.eq.1 ) then
! compute preconditioner
call createPETSCPreconditioner(iprecond)
end if
! fill vector rhs
if (ierr == PETSC_OK) call VecGetArray(rhs,dum,idum,ierr)
i = 0
rhs_val = 0d0
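! iglobal(ndn)-iglobal(rowtoelem(1))+1 maps the global row number to the local (1-based)
! position in rhs_val/sol_val; this relies on the contiguous per-domain numbering checked in ini_petsc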
do n=nogauss+1,nogauss+nocg
ndn = noel(n)
if ( iglobal(ndn).gt.0 ) then
i = iglobal(ndn)-iglobal(rowtoelem(1))+1
! if ( kfs(ndn).ne.0 ) then
rhs_val(i) = ddr(ndn)
! else
! rhs_val(i) = 0d0
! end if
end if
end do
if (ierr == PETSC_OK) call VecRestoreArray(rhs,dum,idum,ierr)
! fill vector sol
if (ierr == PETSC_OK) call VecGetArray(sol,dum,idum,ierr)
sol_val = 0d0
do n=nogauss+1,nogauss+nocg
ndn = noel(n)
if ( iglobal(ndn).gt.0 ) then
i = iglobal(ndn)-iglobal(rowtoelem(1))+1
! if ( kfs(ndn).ne.0 ) then
sol_val(i) = s1(ndn)
! else
! sol_val(i) = 0d0
! end if
end if
end do
if ( ierr == PETSC_OK ) call VecRestoreArray(sol,dum,idum,ierr)
if ( ierr /= PETSC_OK ) print *,'conjugategradientPETSC: PETSC_ERROR (3)'
if ( ierr /= PETSC_OK ) go to 1234
! solve system
if (ierr == PETSC_OK) call KSPSolve(Solver, rhs, sol, ierr)
if (ierr == PETSC_OK) call KSPGetConvergedReason(Solver, Reason, ierr)
! check for convergence
if (ierr == PETSC_OK) then
if (reason==KSP_DIVERGED_INDEFINITE_PC) then
call mess(LEVEL_ERROR,'Divergence because of indefinite preconditioner')
else if (Reason<0) then
write(message,*) 'Other kind of divergence: this should not happen, reason = ', Reason
call mess(LEVEL_ERROR,trim(message))
! see http://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPConvergedReason.html for reason
else
call KSPGetIterationNumber(Solver, its, ierr)
! compute residual
call KSPGetResidualNorm(Solver,rnorm,ierr)
write(message,*) 'Solver converged in ',its,' iterations, res=', rnorm
if (ierr == PETSC_OK) then
if ( my_rank.eq.0 ) call mess(LEVEL_INFO, message)
end if
jasucces = 1
end if
end if
if (ierr /= PETSC_OK) call mess(LEVEL_ERROR, 'conjugategradientPETSC: PETSC_ERROR (after solve)')
if (ierr /= PETSC_OK) go to 1234
! begin debug
if (.false.) then
! equation: A*sol = rhs
! Residu is: res = A*sol-rhs
call MatMult(Amat, sol, res, ierr) ! res := Amat * sol
call mess(LEVEL_INFO,'MatMul is done')
call VecAxpy(res, -1.0d0, rhs, ierr) ! res -= rhs => res = Amat * sol - rhs
call mess(LEVEL_INFO,'VecAxpy is done')
do irank = 0, ndomains-1
if (irank == my_rank) then
do i = 1,numrows
print '(a,i6,a,i6,100(a,g15.8))','rank ',my_rank,', eq ',iglobal(i),', res= ',-res_val(i),' s1= ',sol_val(i),', rhs=',rhs_val(i)
end do
end if
if (ierr == PETSC_OK) call mpi_barrier(DFM_COMM_DFMWORLD,merr)
if (merr /= 0) ierr = PETSC_OK + 1
! call mpi_barrier(DFM_COMM_DFMWORLD,merr)
end do
if (ierr /= PETSC_OK) print *,'conjugategradientPETSC: PETSC_ERROR (4)'
stop
end if
! end debug
! fill vector sol
do n=nogauss+1,nogauss+nocg
ndn = noel(n)
if ( iglobal(ndn).gt.0 .and. kfs(ndn).gt.0 ) then
i = iglobal(ndn)-iglobal(rowtoelem(1))+1
s1(ndn) = sol_val(i)
end if
end do
! if ( its.gt.10 ) then
! call writesystem()
! call mess(LEVEL_ERROR, 'number of iterations exceeded limit')
! end if
1234 continue
! restore joff
! joff = joffsav
! clean up
! mark fail by setting number of iterations to -999
if ( jasucces.ne.1 ) then
its = -999
call mess(LEVEL_ERROR,'conjugategradientPETSC: error')
end if
end subroutine conjugategradientPETSC
subroutine killSolverPETSC()
!#include
use petsc
use m_petsc
implicit none
PetscErrorCode :: ierr = PETSC_OK
call KSPDestroy(Solver, ierr)
end subroutine killSolverPETSC
!> write the system to a MATLAB script (matrix.m)
subroutine writesystem()
use m_partitioninfo
use m_flowgeom, only: kfs
use m_petsc
implicit none
integer :: i, j, irow, jcol, ifirstrow, n
integer :: iter, ierr
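! the domains take turns (enforced by MPI_barrier) appending their part of the system to matrix.m,
! which can be run in MATLAB to rebuild the full sparse matrix A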
do iter=1,6 ! first matrix, then rhs, then rowtoelem, then iglobal (for row elements), then kfs, then construct matrix
do n=0,ndomains-1
if ( my_rank.eq.n ) then
! open file
if ( my_rank.eq.0 .and. iter.eq.1 ) then
open(1234,file='matrix.m')
else
open(1234,file='matrix.m',access='append')
end if
! write header
if ( my_rank.eq.0 ) then
if ( iter.eq.1 ) then
write(1234,"('dum = [')")
else if ( iter.eq.2 ) then
write(1234,"('rhs = [')")
else if ( iter.eq.3 ) then
write(1234,"('rowtoelem = [')")
else if ( iter.eq.4 ) then
write(1234,"('iglobal = [')")
else if ( iter.eq.5 ) then
write(1234,"('kfs = [')")
else if ( iter.eq.6 ) then
write(1234,"('N = max(dum(:,1));')")
write(1234,"('A = sparse(dum(:,1), dum(:,2), dum(:,3), N, N);')")
end if
end if
ifirstrow = iglobal(rowtoelem(1))
if ( iter.eq.1 ) then ! matrix
! write this part of the matrix to file
do i=1,numrows
irow = i+ifirstrow-1
do j=idia(i)+1,idia(i+1)
jcol = jdia(j) + ifirstrow
write(1234, "(2I7,E15.5)") irow, jcol, adia(j)
end do
do j=ioff(i)+1,ioff(i+1)
jcol = joffsav(j) + 1
write(1234, "(2I7,E15.5)") irow, jcol, aoff(j)
end do
end do
else if ( iter.eq.2 ) then ! rhs
! write this part of rhs to file
do i=1,numrows
write(1234, "(E15.5)") rhs_val(i)
end do
else if ( iter.eq.3 ) then ! rowtoelem
! write this part of rowtoelem to file
do i=1,numrows
write(1234, "(I7)") rowtoelem(i)
end do
else if ( iter.eq.4 ) then ! iglobal
! write this part of iglobal to file
do i=1,numrows
write(1234, "(I7)") iglobal(rowtoelem(i))
end do
else if ( iter.eq.5 ) then ! kfs
! write this part of kfs to file
do i=1,numrows
write(1234, "(I7)") kfs(rowtoelem(i))
end do
end if
if ( my_rank.eq.ndomains-1 ) then
! write footer
if ( iter.ne.6 ) then
write(1234,"('];')")
end if
end if
! close file
call flush(1234)
close(1234)
end if
call MPI_barrier(DFM_COMM_DFMWORLD, ierr)
end do
end do
return
end subroutine writesystem