From db45ac0be0ae7ad9891156fe0a3e4c71e7d2d9d7 Mon Sep 17 00:00:00 2001 From: Yuning Date: Thu, 4 Apr 2024 23:47:42 +0200 Subject: [PATCH] performance optimize: covariance calculation --- src/stochastics.jl | 23 +++++-- test/teststochastics.jl | 130 ++++++++++++++++++++-------------------- 2 files changed, 84 insertions(+), 69 deletions(-) diff --git a/src/stochastics.jl b/src/stochastics.jl index 2b232b2..d22f1ba 100644 --- a/src/stochastics.jl +++ b/src/stochastics.jl @@ -107,6 +107,11 @@ function covariance(p₁::Vector{<:Real}, p₂::Vector{<:Real}, process::Ornstei process.σ^2 / prod(2 * process.θ) * exp(-dot(process.θ, abs.(p₁ - p₂))) end +function covariance(p₁::Tuple, p₂::Tuple, process::OrnsteinUhlenbeckField)::Real + process.σ^2 / (4 * prod(process.θ)) * exp(-dot(process.θ, abs.(p₁ .- p₂))) +end + + """ Covariance function of Pink-Brownian process. """ @@ -133,11 +138,10 @@ When `P₁!=P₂`, it is the cross-covariance matrix between two Gaussian random function covariancematrix(P₁::Matrix{<:Real}, P₂::Matrix{<:Real}, process::GaussianRandomField)::Matrix{Real} @assert size(P₁) == size(P₂) N = size(P₁, 1) - P₁ = P₁'; P₂ = P₂'; A = Matrix{Real}(undef, N, N) Threads.@threads for i in 1:N for j in 1:N - A[i, j] = covariance(P₁[:, i], P₂[:, j], process) + A[i, j] = covariance(P₁[i, :], P₂[j, :], process) end end return A end @@ -148,11 +152,22 @@ Auto-Covariance matrix of a Gaussian random process. 
""" function covariancematrix(P::Matrix{<:Real}, process::GaussianRandomField)::Symmetric N = size(P, 1) - P = P' A = Matrix{Real}(undef, N, N) Threads.@threads for i in 1:N for j in i:N - A[i, j] = covariance(P[:, i], P[:, j], process) + A[i, j] = covariance(P[i,:], P[j, :], process) + end + end + return Symmetric(A) +end + + +function covariancematrix(P::Vector{<:Tuple{<:Real,<:Real}}, process::GaussianRandomField)::Symmetric + N = size(P, 1) + A = Matrix{Real}(undef, N, N) + Threads.@threads for i in 1:N + for j in i:N + A[i, j] = covariance(P[i], P[j], process) end end return Symmetric(A) diff --git a/test/teststochastics.jl b/test/teststochastics.jl index 2f48149..2eb1320 100644 --- a/test/teststochastics.jl +++ b/test/teststochastics.jl @@ -7,77 +7,77 @@ using Statistics: std, mean figsize=(400, 300) visualize=false -## -# @testset begin "test of random function" -# T=400; L=10; σ = sqrt(2) / 20; M = 10000; N=11; κₜ=1/20;κₓ=1/0.1; -# v=L/T; -# t=range(0, T, N) -# P=hcat(t, v.*t) -# B=OrnsteinUhlenbeckField(0,[κₜ,κₓ],σ) -# R=RandomFunction(P , B) -# @test R() isa Vector{<:Real} -# @test R.Σ isa Symmetric - -# t₀=T/5 - - -# P1=hcat(t, v.*t); P2=hcat(t, v.*(t.-t₀)) -# crosscov=covariancematrix(P1, P2, B) +# +@testset begin "test of random function" + T=400; L=10; σ = sqrt(2) / 20; M = 10000; N=11; κₜ=1/20;κₓ=1/0.1; + v=L/T; + t=range(0, T, N) + P=hcat(t, v.*t) + B=OrnsteinUhlenbeckField(0,[κₜ,κₓ],σ) + R=RandomFunction(P , B) + @test R() isa Vector{<:Real} + @test R.Σ isa Symmetric + + t₀=T/5 + + + P1=hcat(t, v.*t); P2=hcat(t, v.*(t.-t₀)) + crosscov=covariancematrix(P1, P2, B) + + if visualize + display(heatmap(collect(R.Σ), title="covariance matrix, test fig 1", size=figsize)) + display(plot([R(),R(),R()],title="random function, test fig 2", size=figsize)) + display(heatmap(crosscov, title="cross covariance matrix, test fig 3", size=figsize)) + end -# if visualize -# display(heatmap(collect(R.Σ), title="covariance matrix, test fig 1", size=figsize)) -# 
display(plot([R(),R(),R()],title="random function, test fig 2", size=figsize)) -# display(heatmap(crosscov, title="cross covariance matrix, test fig 3", size=figsize)) -# end + @test transpose(crosscov) == covariancematrix(P2, P1, B) -# @test transpose(crosscov) == covariancematrix(P2, P1, B) + P=vcat(P1,P2) + R=RandomFunction(P,B) + c=[1,1] + RΣ=sum(c'*c .* covariancepartition(R, 2)) + @test size(RΣ) == (N,N) -# P=vcat(P1,P2) -# R=RandomFunction(P,B) -# c=[1,1] -# RΣ=sum(c'*c .* covariancepartition(R, 2)) -# @test size(RΣ) == (N,N) + @test issymmetric(RΣ) + @test ishermitian(RΣ) -# @test issymmetric(RΣ) -# @test ishermitian(RΣ) + RC=CompositeRandomFunction(R, c) + if visualize + display(heatmap(sqrt.(RΣ))) + end +end -# RC=CompositeRandomFunction(R, c) -# if visualize -# display(heatmap(sqrt.(RΣ))) -# end -# end +# +@testset "trapezoid vs simpson for covariance matrix" begin + L=10; σ = sqrt(2) / 20; N=501; κₜ=1/20;κₓ=1/0.1; + v=20; + B=OrnsteinUhlenbeckField(0,[κₜ,κₓ],σ) -## -# @testset "trapezoid vs simpson for covariance matrix" begin -# L=10; σ = sqrt(2) / 20; N=501; κₜ=1/20;κₓ=1/0.1; -# v=20; -# B=OrnsteinUhlenbeckField(0,[κₜ,κₓ],σ) - -# M=30 -# err1=zeros(M) -# err2=zeros(M) -# i=1 -# for T in range(1, 50, length=M) -# t=range(0, T, N) -# P=hcat(t, v.*t) -# R=RandomFunction(P , B) -# dt=T/N -# f1=exp(-integrate(R.Σ[:,:], dt, dt, method=:trapezoid)/2) -# f2=exp(-integrate(R.Σ[:,:], dt, dt, method=:simpson)/2) -# @test isapprox(f1,f2,rtol=1e-2) -# f3=φ(T,L,B) -# err1[i]=abs(f1-f3) -# err2[i]=abs(f2-f3) -# i+=1 -# end -# println("mean 1st order:", mean(err1)) -# println("mean 2nd order:", mean(err2)) -# if visualize -# fig=plot(err1, xlabel="T", ylabel="error", label="trapezoid") -# plot!(err2, label="simpson") -# display(fig) -# end -# end + M=30 + err1=zeros(M) + err2=zeros(M) + i=1 + for T in range(1, 50, length=M) + t=range(0, T, N) + P=hcat(t, v.*t) + R=RandomFunction(P , B) + dt=T/N + f1=exp(-integrate(R.Σ[:,:], dt, dt, method=:trapezoid)/2) + 
f2=exp(-integrate(R.Σ[:,:], dt, dt, method=:simpson)/2) + @test isapprox(f1,f2,rtol=1e-2) + f3=φ(T,L,B) + err1[i]=abs(f1-f3) + err2[i]=abs(f2-f3) + i+=1 + end + println("mean 1st order:", mean(err1)) + println("mean 2nd order:", mean(err2)) + if visualize + fig=plot(err1, xlabel="T", ylabel="error", label="trapezoid") + plot!(err2, label="simpson") + display(fig) + end +end ## @testset "symmetric integration for covariance matrix" begin