Skip to content

Commit a74322d

Browse files
authored
Merge pull request #5 from fverdugo/bugfix_parallel_ksp
Fixing bug in MPI-based KSP solver
2 parents 8fddd94 + b3cd2de commit a74322d

File tree

1 file changed

+1
-3
lines changed

1 file changed

+1
-3
lines changed

src/ksp.jl

Lines changed: 1 addition & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -271,8 +271,6 @@ end
271271

272272
function VecCreateMPIWithArray_args(v::PVector,petsc_comm,block_size=1)
273273
@assert isa(partition(v),MPIArray)
274-
# TODO not asserted assumptions:
275-
# Assumes that global ids are ordered and that the vector is assembled
276274
rows = axes(v,1)
277275
N = length(rows)
278276
function setup(v_own)
@@ -330,7 +328,7 @@ function ksp_setup_parallel_impl(::MPIArray,x,A,b;
330328
@check_error_code MatAssemblyEnd(mat[],MAT_FINAL_ASSEMBLY)
331329
@check_error_code VecCreateMPIWithArray(args_b...,vec_b)
332330
@check_error_code VecCreateMPIWithArray(args_x...,vec_x)
333-
@check_error_code KSPCreate(MPI.COMM_SELF,ksp)
331+
@check_error_code KSPCreate(petsc_comm,ksp)
334332
@check_error_code KSPSetOperators(ksp[],mat[],mat[])
335333
low_level_setup(ksp)
336334
@check_error_code KSPSetUp(ksp[])

0 commit comments

Comments (0)