Usage
Here we demonstrate how to use the SmoQyHankelCorrCleaner.jl package.
using SmoQyHankelCorrCleaner
using Statistics
using CairoMakie
CairoMakie.activate!(type = "svg")
As an initial example, we consider the atomic Hubbard model
\[H = U (\hat{n}_\uparrow - \tfrac{1}{2})(\hat{n}_\downarrow - \tfrac{1}{2}) - \mu (\hat{n}_\uparrow + \hat{n}_\downarrow)\]
with the corresponding imaginary-time Green's function given by
\[G_\sigma(\tau) = \langle \hat{c}_\sigma^{\dagger}(\tau) \hat{c}_\sigma^{\phantom\dagger}(0) \rangle = \frac{e^{\tau U/2 + \tau \mu - \beta U/4} + e^{-\tau U/2 + \tau \mu + \beta U/4 + \beta \mu}}{e^{-\beta U/4} + 2 e^{\beta (U/4+\mu)} + e^{-\beta(U/4-2\mu)}},\]
where $\tau \in [0,\beta)$ and $\beta = 1/T$ is the inverse temperature.
# Evaluate G(τ) for the atomic Hubbard model
# Exact imaginary-time Green's function G_σ(τ) of the single-site (atomic)
# Hubbard model with interaction U, chemical potential μ, at inverse
# temperature β. Valid for τ ∈ [0, β].
function atomic_hubbard_greens(τ, β, U, μ)
    # Partition function: sum over the four atomic Fock states.
    partition = exp(-β * U / 4) + 2 * exp(β * (U / 4 + μ)) + exp(-β * (U / 4 - 2 * μ))
    # Unnormalized Green's function Z⋅G(τ), with exponents grouped by τ and β.
    weight = exp(τ * (U / 2 + μ) - β * U / 4) + exp(τ * (μ - U / 2) + β * (U / 4 + μ))
    return weight / partition
end
atomic_hubbard_greens (generic function with 1 method)
Now we assign parameter values in our model, including the inverse temperature.
# Hubbard repulsion (on-site interaction strength)
U = 5.0
# Chemical potential
μ = 2.50
# Inverse temperature β = 1/T (semicolon suppresses REPL output)
β = 5.0;
Next we evaluate the Green's function on a regular imaginary-time $\tau$ grid.
# Imaginary-time step size
Δτ = 0.05
# Regular τ grid on [0, β]; β/Δτ is an integer, so both endpoints are included
τ = collect(0.0:Δτ:β)
# Exact Green's function evaluated (broadcast) on the grid
Gτ_exact = atomic_hubbard_greens.(τ, β, U, μ)
101-element Vector{Float64}:
0.33333333333641946
0.33333333333773435
0.3333333333394226
0.33333333334159043
0.3333333333443739
0.33333333334794807
0.33333333335253734
0.3333333333584301
0.3333333333659964
0.3333333333757119
⋮
0.37844509441045227
0.39125798114833704
0.4077100533809225
0.42883493228474484
0.45595981372170324
0.49078885091139945
0.535510219901732
0.59293359435439
0.6666666666635803
Now let us define some synthetic noisy data.
# Add normal random noise to Green's function (fresh i.i.d. draw per τ point)
Gτ_noisy = Gτ_exact + 0.007 * randn(length(τ))
# Reduce noise for G(τ = 0) point
Gτ_noisy[1] = Gτ_exact[1] + 0.002 * randn()
# Ensure that noisy data still satisfy the sum rule G(τ=0) + G(τ=β) = 1
Gτ_noisy[end] = 1 - Gτ_noisy[1]
Gτ_noisy
101-element Vector{Float64}:
0.3328437848362903
0.3511753573191441
0.3224449043641833
0.3357976243344041
0.3268270336298307
0.33689623308347116
0.34026614687167045
0.32941654939152104
0.3361118663724648
0.3470628034459933
⋮
0.36319990635911
0.389117410956535
0.4120310276619907
0.4265760419646532
0.45748217943270286
0.49696175601175097
0.5376408093721259
0.5916933659374001
0.6671562151637097
Now let us denoise the noisy Green's function using the hankel_correlation_cleaner
method.
# Denoise imaginary-time Green's function.
# NOTE(review): keyword semantics inferred from names — maxiter/tol bound the
# iterative cleaning, fixed_endpoints presumably pins G(0) and G(β), and
# symmetric = false matches G(τ) ≠ G(β - τ) here; confirm against the
# SmoQyHankelCorrCleaner.jl documentation.
Gτ_clean, iter, err = hankel_correlation_cleaner(
Gτ_noisy,
maxiter = 100,
tol = 1e-3,
positive_curvature = true,
fixed_endpoints = true,
symmetric = false,
verbose = false
)
# Report iterations used and final error measure
(iter, err)
(44, 0.0009804840099894093)
Now let us plot the result to see how this all looks.
# Compare noisy, exact, and denoised G(τ) in a single panel.
fig = Figure(
size = (700, 700),
fonts = (; regular= "CMU Serif"),
figure_padding = 10
)
ax = Axis(fig[1, 1],
aspect = 1,
xlabel = L"\tau",
ylabel = L"G_\sigma(\tau)",
xlabelsize = 36,
ylabelsize = 36,
xticklabelsize = 30,
yticklabelsize = 30,
)
xlims!(ax, 0.0, β)
# Noisy synthetic data as scatter points
scatter!(
τ, Gτ_noisy,
color = :blue, markersize = 9, label = "Noisy"
)
# Exact analytic result as a dashed reference curve
lines!(
τ, Gτ_exact,
linewidth = 7, alpha = 1.0, color = :black, linestyle = :dash, label = "Exact"
)
# Denoised result on top
lines!(
τ, Gτ_clean,
linewidth = 4, color = :red, linestyle = :solid, label = "Denoised"
)
axislegend(ax, halign = :left, valign = :top, labelsize = 34)
fig
Often noisy imaginary-time correlation data is generated by a quantum Monte Carlo simulation, or some other stochastic process, in which you have a set of samples or bins. In this case it is desirable to propagate errors through the denoising process to get a final error for the denoised correlation function. Below we will demonstrate two ways of doing this, one using the jackknife method and the other via bootstrap resampling.
To start, let us generate binned noisy synthetic $G(\tau)$ data.
# Number of samples (bins)
N_bins = 32
# Generate synthetic binned data: each bin is the exact G(τ) plus independent
# Gaussian noise. `repeat(Gτ_exact, 1, N_bins)` builds the same length(τ) × N_bins
# matrix as splatting a generator into `hcat`, without the splat anti-pattern.
Gτ_binned = repeat(Gτ_exact, 1, N_bins) + 0.08 * randn(length(τ), N_bins)
# Reduce noise for the G(τ = 0) point.
# NOTE(review): under `@.` the `randn()` call is fused into the broadcast, so it
# appears to be evaluated per bin (independent draws) — confirm this is intended.
@. Gτ_binned[1,:] = Gτ_exact[1] + 0.02 * randn()
# Ensure that noisy data still satisfy the sum rule G(τ=0) + G(τ=β) = 1 per bin
@. Gτ_binned[end,:] = 1.0 - Gτ_binned[1,:]
Gτ_binned
101×32 Matrix{Float64}:
0.338524 0.339408 0.377109 0.350673 … 0.29762 0.34056 0.352436
0.369411 0.320603 0.417601 0.326273 0.271156 0.363993 0.428966
0.254064 0.444613 0.328821 0.388379 0.467306 0.189269 0.282214
0.267648 0.179984 0.366535 0.300279 0.324818 0.288644 0.259785
0.410863 0.468305 0.207458 0.459637 0.499573 0.408853 0.439395
0.608959 0.363517 0.318438 0.391369 … 0.257818 0.312294 0.269735
0.320748 0.458357 0.381127 0.327492 0.327503 0.308169 0.330515
0.25566 0.407335 0.352726 0.338186 0.322658 0.314449 0.307015
0.413345 0.154728 0.301967 0.4537 0.436066 0.297201 0.333189
0.469105 0.189427 0.332555 0.22911 0.389869 0.186064 0.384488
⋮ ⋱ ⋮
0.4036 0.375991 0.264566 0.282571 0.467744 0.34927 0.412087
0.360877 0.429851 0.468628 0.381673 0.358528 0.293567 0.424776
0.372212 0.34157 0.350242 0.451367 0.598485 0.422945 0.500402
0.49906 0.391639 0.410659 0.341979 … 0.314345 0.539132 0.4355
0.518648 0.293182 0.354095 0.489292 0.452409 0.330849 0.407106
0.55338 0.491522 0.459545 0.410798 0.539289 0.465082 0.480209
0.652357 0.562099 0.478753 0.60214 0.662534 0.703315 0.515136
0.547533 0.741507 0.542771 0.617437 0.744496 0.607858 0.402856
0.661476 0.660592 0.622891 0.649327 … 0.70238 0.65944 0.647564
Before we demonstrate how to propagate errors through the denoising process, let us first calculate the mean and error of our binned $G(\tau)$ data. Having calculated the mean and error of our synthetic binned $G(\tau)$ data, let us also plot it quickly as a reference.
# Calculate mean of binned data (average over the bin dimension)
Gτ_avg = vec(mean(Gτ_binned, dims=2))
# Calculate standard deviation of the mean (standard error) of binned data
Gτ_err = vec(std(Gτ_binned, dims=2)) / sqrt(N_bins)
# Get the average plus or minus the error (one-σ confidence band edges)
Gτ_avg_lower = Gτ_avg - Gτ_err
Gτ_avg_upper = Gτ_avg + Gτ_err;
# Now we plot the average with the one standard deviation confidence interval
fig = Figure(
size = (700, 700),
fonts = (; regular= "CMU Serif"),
figure_padding = 10
)
ax = Axis(fig[1, 1],
aspect = 1,
xlabel = L"\tau",
ylabel = L"G_\sigma(\tau)",
xlabelsize = 36,
ylabelsize = 36,
xticklabelsize = 30,
yticklabelsize = 30,
)
# Shaded one-σ confidence band
b_err = band!(
τ, Gτ_avg_lower, Gτ_avg_upper,
color = (:red, 0.2)
)
# translate! sets the z-offset so later curves draw on top of the band
translate!(b_err, 0, 0, 0.0)
l_err = lines!(
τ, Gτ_exact,
linewidth = 3, alpha = 1.0, color = :black, linestyle = :solid, label = "Exact"
)
translate!(l_err, 0, 0, 2.0)
l_avg = lines!(
τ, Gτ_avg,
linewidth = 3, color = :red, linestyle = :solid, label = "Mean"
)
translate!(l_avg, 0, 0, 1.0)
# Thin lines outlining the band edges
l_lower = lines!(
τ, Gτ_avg_lower,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_lower, 0, 0, 1.0)
l_upper = lines!(
τ, Gτ_avg_upper,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_upper, 0, 0, 1.0)
xlims!(ax, 0.0, β)
axislegend(ax, halign = :left, valign = :top, labelsize = 34)
fig
Now let us denoise the binned $G(\tau)$ while propagating the errors using the jackknife_hankel_correlation_cleaner
method.
# Denoise and propagate errors with jackknife (leave-one-bin-out resampling).
# NOTE(review): sign_bins = ones(N_bins) presumably corresponds to a
# sign-problem-free case; covariance = true requests the covariance matrix as
# the third return value — confirm against the package documentation.
Gτ_jackknife_avg, Gτ_jackknife_err, Gτ_jackknife_cov = jackknife_hankel_correlation_cleaner(
correlation_bins = Gτ_binned,
sign_bins = ones(N_bins),
maxiter = 100,
tol= 1e-3,
positive_curvature = true,
fixed_endpoints = true,
symmetric = false,
covariance = true
)
# Get the average plus or minus the error (one-σ confidence band edges)
Gτ_jackknife_avg_lower = Gτ_jackknife_avg - Gτ_jackknife_err
Gτ_jackknife_avg_upper = Gτ_jackknife_avg + Gτ_jackknife_err;
# Now we plot the average with the one standard deviation confidence interval
fig = Figure(
size = (700, 700),
fonts = (; regular= "CMU Serif"),
figure_padding = 10
)
ax = Axis(fig[1, 1],
aspect = 1,
xlabel = L"\tau",
ylabel = L"G_\sigma(\tau)",
xlabelsize = 36,
ylabelsize = 36,
xticklabelsize = 30,
yticklabelsize = 30,
)
# Shaded one-σ confidence band from the jackknife errors
b_err = band!(
τ, Gτ_jackknife_avg_lower, Gτ_jackknife_avg_upper,
color = (:red, 0.2)
)
# translate! sets the z-offset so later curves draw on top of the band
translate!(b_err, 0, 0, 0.0)
l_err = lines!(
τ, Gτ_exact,
linewidth = 3, alpha = 1.0, color = :black, linestyle = :solid, label = "Exact"
)
translate!(l_err, 0, 0, 2.0)
l_avg = lines!(
τ, Gτ_jackknife_avg,
linewidth = 3, color = :red, linestyle = :solid, label = "Denoised Jackknife Mean"
)
translate!(l_avg, 0, 0, 1.0)
# Thin lines outlining the band edges
l_lower = lines!(
τ, Gτ_jackknife_avg_lower,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_lower, 0, 0, 1.0)
l_upper = lines!(
τ, Gτ_jackknife_avg_upper,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_upper, 0, 0, 1.0)
xlims!(ax, 0.0, β)
axislegend(ax, halign = :left, valign = :top, labelsize = 34)
fig
Lastly, let us denoise the binned $G(\tau)$ but instead propagate error using the bootstrap_hankel_correlation_cleaner
method.
# Denoise and propagate errors with bootstrap resampling.
# NOTE(review): N_bootstrap presumably sets the number of bootstrap resamples;
# remaining keywords mirror the jackknife call — confirm against the package
# documentation.
Gτ_bootstrap_avg, Gτ_bootstrap_err, Gτ_bootstrap_cov = bootstrap_hankel_correlation_cleaner(
correlation_bins = Gτ_binned,
sign_bins = ones(N_bins),
N_bootstrap = 100,
maxiter = 100,
tol= 1e-3,
positive_curvature = true,
fixed_endpoints = true,
symmetric = false,
covariance = true
)
# Get the average plus or minus the error (one-σ confidence band edges)
Gτ_bootstrap_avg_lower = Gτ_bootstrap_avg - Gτ_bootstrap_err
Gτ_bootstrap_avg_upper = Gτ_bootstrap_avg + Gτ_bootstrap_err;
# Now we plot the average with the one standard deviation confidence interval
fig = Figure(
size = (700, 700),
fonts = (; regular= "CMU Serif"),
figure_padding = 10
)
ax = Axis(fig[1, 1],
aspect = 1,
xlabel = L"\tau",
ylabel = L"G_\sigma(\tau)",
xlabelsize = 36,
ylabelsize = 36,
xticklabelsize = 30,
yticklabelsize = 30,
)
# Shaded one-σ confidence band from the bootstrap errors
b_err = band!(
τ, Gτ_bootstrap_avg_lower, Gτ_bootstrap_avg_upper,
color = (:red, 0.2)
)
# translate! sets the z-offset so later curves draw on top of the band
translate!(b_err, 0, 0, 0.0)
l_err = lines!(
τ, Gτ_exact,
linewidth = 3, alpha = 1.0, color = :black, linestyle = :solid, label = "Exact"
)
translate!(l_err, 0, 0, 2.0)
l_avg = lines!(
τ, Gτ_bootstrap_avg,
linewidth = 3, color = :red, linestyle = :solid, label = "Denoised Bootstrap Mean"
)
translate!(l_avg, 0, 0, 1.0)
# Thin lines outlining the band edges
l_lower = lines!(
τ, Gτ_bootstrap_avg_lower,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_lower, 0, 0, 1.0)
l_upper = lines!(
τ, Gτ_bootstrap_avg_upper,
linewidth = 2, alpha = 0.6, color = :red, linestyle = :solid
)
translate!(l_upper, 0, 0, 1.0)
xlims!(ax, 0.0, β)
axislegend(ax, halign = :left, valign = :top, labelsize = 34)
fig