using LinearAlgebra
using PyPlot
len = 300
x = y = range(-2pi,stop=2pi,length=len)
uh = [sin(xx^2)+cos(yy)^2 for xx in x, yy in y]
fh = [sin(xx)+cos(yy) for xx in x, yy in y]
function solve_poisson!(uh, fh, iter=1000)
    @inbounds begin
        n = LinearAlgebra.checksquare(uh)
        II = LinearIndices((n, n))
        # Gauss-Seidel: sweep the interior points in place, so every update
        # immediately reuses the freshly computed values of its neighbours.
        for _ in 1:iter, ii in 2:n-1, jj in 2:n-1
            uh[II[ii,jj]] = ( fh[II[ii,jj]] + uh[II[ii-1,jj]] + uh[II[ii+1,jj]] +
                              uh[II[ii,jj-1]] + uh[II[ii,jj+1]] )/4
        end
        return uh
    end
end
subplot(2, 1, 1)
contour(x, y, uh)
title("Initial Condition")
solve_poisson!(uh, fh)
subplot(2, 1, 2)
contour(x, y, uh)
title("After 1000 Gauss-Seidel Iterations")
savefig("Poisson.png", dpi=600)
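As a quick sanity check, you can measure how far the iterate is from satisfying the discrete equation that the sweep solves. The snippet below is only a minimal sketch: residual_norm is a helper name I made up, and it simply evaluates the same 5-point stencil that solve_poisson! sweeps over.

using LinearAlgebra

# Hypothetical helper: largest defect of the discrete equation
# 4*u[i,j] - u[i-1,j] - u[i+1,j] - u[i,j-1] - u[i,j+1] = f[i,j]
# over the interior points, i.e. the stencil used by solve_poisson!.
function residual_norm(uh, fh)
    n = LinearAlgebra.checksquare(uh)
    r = 0.0
    @inbounds for ii in 2:n-1, jj in 2:n-1
        r = max(r, abs(4*uh[ii,jj] - uh[ii-1,jj] - uh[ii+1,jj] -
                       uh[ii,jj-1] - uh[ii,jj+1] - fh[ii,jj]))
    end
    return r
end

# e.g. after the Gauss-Seidel run above:
println("max residual after Gauss-Seidel: ", residual_norm(uh, fh))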
I won't go into how to optimize Jacobi iteration, because it converges more slowly than Gauss-Seidel: if you can optimize Gauss-Seidel, there is no reason to cling to Jacobi. Moreover, in a simple setting like this, weighted Jacobi also uses twice the memory of Gauss-Seidel, since it has to keep a second copy of the grid. The example below shows that Jacobi iteration falls well short of Gauss-Seidel, and you can also prove mathematically that it is slower; if you are curious about the proof, see Theorem 9.3 on page 445 of A. Ralston and P. Rabinowitz, A First Course in Numerical Analysis, 2nd edition, McGraw-Hill, New York, 1978. Ask me if it is unclear.
using LinearAlgebra
using PyPlot
len = 300
x = y = range(-2pi,stop=2pi,length=len)
uh = [sin(xx^2)+cos(yy)^2 for xx in x, yy in y]
fh = [sin(xx)+cos(yy) for xx in x, yy in y]
function solve_poisson(uh, fh, iter=1000, weight=0.5)
    @inbounds begin
        n = LinearAlgebra.checksquare(uh)
        uh = copy(uh)                 # keep the caller's array untouched
        uhJ = copy(uh)                # second grid holding the plain Jacobi sweep
        II = LinearIndices((n, n))
        for _ in 1:iter
            # plain Jacobi sweep: every interior point is updated from the old iterate
            for ii in 2:n-1, jj in 2:n-1
                uhJ[II[ii,jj]] = ( fh[II[ii,jj]] + uh[II[ii-1,jj]] + uh[II[ii+1,jj]] +
                                   uh[II[ii,jj-1]] + uh[II[ii,jj+1]] )/4
            end
            # weighted Jacobi: blend the old iterate with the new sweep
            for ii in 2:n-1, jj in 2:n-1
                uh[II[ii,jj]] = weight*uh[II[ii,jj]] + (1-weight)*uhJ[II[ii,jj]]
            end
        end
        return uh
    end
end
subplot(2, 1, 1)
contour(x, y, uh)
title("Initial Condition")
uhJ = solve_poisson(uh, fh)
subplot(2, 1, 2)
contour(x, y, uhJ)
title("After 1000 Weighted Jacobi Iterations")
savefig("Poisson.png", dpi=400)
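To see the gap concretely, you can run both solvers from the same initial guess for the same number of sweeps and compare how well each result satisfies the discrete equation. The snippet below is only a rough sketch: it assumes the solve_poisson! and solve_poisson functions defined above and reuses the hypothetical residual_norm helper from the Gauss-Seidel section. For the standard 5-point discretization, the Jacobi iteration matrix has spectral radius cos(πh) while Gauss-Seidel's is cos²(πh) (h being the grid spacing), so one Gauss-Seidel sweep reduces the error roughly as much as two Jacobi sweeps.

# Rough comparison, assuming solve_poisson!, solve_poisson and the
# residual_norm helper sketched earlier are already defined.
len = 300
x = y = range(-2pi, stop=2pi, length=len)
u0 = [sin(xx^2)+cos(yy)^2 for xx in x, yy in y]   # same initial guess as above
fh = [sin(xx)+cos(yy) for xx in x, yy in y]

ugs = solve_poisson!(copy(u0), fh, 1000)   # Gauss-Seidel, sweeping a copy in place
uwj = solve_poisson(copy(u0), fh, 1000)    # weighted Jacobi with the default weight

println("Gauss-Seidel residual:    ", residual_norm(ugs, fh))
println("weighted Jacobi residual: ", residual_norm(uwj, fh))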