Skip to content

Commit

Permalink
Update in-place examples to use refactorization
Browse files (browse the repository at this point in the history)
  • Loading branch information
amontoison committed Jul 30, 2024
1 parent 7403269 commit 94bbef5
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 9 deletions.
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ cudss_set(solver, A_gpu)
c_cpu = rand(T, n)
c_gpu = CuVector(c_cpu)

cudss("factorization", solver, x_gpu, c_gpu)
cudss("refactorization", solver, x_gpu, c_gpu)
cudss("solve", solver, x_gpu, c_gpu)

r_gpu = c_gpu - A_gpu * x_gpu
Expand Down Expand Up @@ -115,7 +115,7 @@ cudss_set(solver, A_gpu)
C_cpu = rand(T, n, p)
C_gpu = CuMatrix(C_cpu)

cudss("factorization", solver, X_gpu, C_gpu)
cudss("refactorization", solver, X_gpu, C_gpu)
cudss("solve", solver, X_gpu, C_gpu)

R_gpu = C_gpu - ( CuSparseMatrixCSR(A_cpu) + Diagonal(d_gpu) ) * X_gpu
Expand Down Expand Up @@ -160,7 +160,7 @@ cudss_set(solver, A_gpu)
C_cpu = rand(T, n, p)
C_gpu = CuMatrix(C_cpu)

cudss("factorization", solver, X_gpu, C_gpu)
cudss("refactorization", solver, X_gpu, C_gpu)
cudss("solve", solver, X_gpu, C_gpu)

R_gpu = C_gpu - ( CuSparseMatrixCSR(A_cpu) + Diagonal(d_gpu) ) * X_gpu
Expand Down
6 changes: 3 additions & 3 deletions src/generic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ function LinearAlgebra.lu!(solver::CudssSolver{T}, A::CuSparseMatrixCSR{T,Cint};
cudss_set(solver, A)
x = CudssMatrix(T, n)
b = CudssMatrix(T, n)
cudss("factorization", solver, x, b)
cudss("refactorization", solver, x, b)
return solver
end

Expand Down Expand Up @@ -80,7 +80,7 @@ function LinearAlgebra.ldlt!(solver::CudssSolver{T}, A::CuSparseMatrixCSR{T,Cint
cudss_set(solver, A)
x = CudssMatrix(T, n)
b = CudssMatrix(T, n)
cudss("factorization", solver, x, b)
cudss("refactorization", solver, x, b)
return solver
end

Expand Down Expand Up @@ -127,7 +127,7 @@ function LinearAlgebra.cholesky!(solver::CudssSolver{T}, A::CuSparseMatrixCSR{T,
cudss_set(solver, A)
x = CudssMatrix(T, n)
b = CudssMatrix(T, n)
cudss("factorization", solver, x, b)
cudss("refactorization", solver, x, b)
return solver
end

Expand Down
6 changes: 3 additions & 3 deletions test/test_cudss.jl
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ function cudss_execution()
c_cpu = rand(T, n)
c_gpu = CuVector(c_cpu)

cudss("factorization", solver, x_gpu, c_gpu)
cudss("refactorization", solver, x_gpu, c_gpu)
cudss("solve", solver, x_gpu, c_gpu)

r_gpu = c_gpu - A_gpu * x_gpu
Expand Down Expand Up @@ -198,7 +198,7 @@ function cudss_execution()
C_cpu = rand(T, n, p)
C_gpu = CuMatrix(C_cpu)

cudss("factorization", solver, X_gpu, C_gpu)
cudss("refactorization", solver, X_gpu, C_gpu)
cudss("solve", solver, X_gpu, C_gpu)

R_gpu = C_gpu - ( CuSparseMatrixCSR(A_cpu) + Diagonal(d_gpu) ) * X_gpu
Expand Down Expand Up @@ -243,7 +243,7 @@ function cudss_execution()
C_cpu = rand(T, n, p)
C_gpu = CuMatrix(C_cpu)

cudss("factorization", solver, X_gpu, C_gpu)
cudss("refactorization", solver, X_gpu, C_gpu)
cudss("solve", solver, X_gpu, C_gpu)

R_gpu = C_gpu - ( CuSparseMatrixCSR(A_cpu) + Diagonal(d_gpu) ) * X_gpu
Expand Down

0 comments on commit 94bbef5

Please sign in to comment.