using Distributions, Random, Printf, Plots
Random.seed!(123)
data = randn(100)

dist1 = Normal(-3,0.5)
dist2 = Normal(-3,4)

ll1 = mean(logpdf.(dist1,data))
ll2 = mean(logpdf.(dist2,data))

line = collect(-8:0.1:8)

p1 = plot(line, pdf.(dist1,line), label="Model density", size=(1000,500),
    ylim=(-0.02,1), title=@sprintf("Model LogLikelihood: %.3f", ll1))
plot!(p1, line, pdf.(Normal(),line), color=:red, label="True density")
scatter!(p1, data, zeros(100), label="Sampled data")
vline!(p1, [-3], color=1, linestyle=:dash, lw=2, label="Model distribution mean")

p2 = plot(line, pdf.(dist2,line), label="Model density", size=(1000,500),
    ylim=(-0.02,1), title=@sprintf("Model LogLikelihood: %.3f", ll2))
plot!(p2, line, pdf.(Normal(),line), color=:red, label="True density")
scatter!(p2, data, zeros(100), label="Sampled data")
vline!(p2, [-3], color=1, linestyle=:dash, lw=2, label="Model distribution mean")

plot(p1, p2, size=(1000,500), fmt=:png)
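Both mean log-likelihoods (echoed below) can be cross-checked against Distributions.jl's loglikelihood function, which sums logpdf over the sample. This is just a sanity check added here, not part of the original analysis:

#Sanity check: the mean log-likelihood is the summed log-likelihood scaled by the sample size
ll1 ≈ loglikelihood(dist1, data) / length(data)  #should evaluate to true
ll2 ≈ loglikelihood(dist2, data) / length(data)  #should evaluate to true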
ll1
-19.364897862569727

ll2
-2.6042814335421416

GARCH with varying coefficients
using Plots, CSV, DataFrames
#https://de.finance.yahoo.com/quote/%5EGDAXI/history?period1=1515801600&period2=1673568000&interval=1d&filter=history&frequency=1d&includeAdjustedClose=true
df = CSV.File("GDAXI.csv") |> DataFrame
a_close_raw = df[!,["Adj Close", "Date"]]
a_close_nonull = a_close_raw[findall(a_close_raw[!,"Adj Close"] .!= "null"),:]
a_close = parse.(Float32, a_close_nonull[!,"Adj Close"])

returns = diff(log.(a_close))

train = returns[1:end-99]
train_idx = a_close_nonull[!,"Date"][2:end-99]

test = returns[end-99:end]
test_idx = a_close_nonull[!,"Date"][end-99:end]
plot(train_idx,train, label="^GDAXI daily returns - Train", size = (1200,600), fmt=:png)
plot!(test_idx, test, label="^GDAXI daily returns - Test")
using Flux, Distributions
struct VarCoeffGARCH
    constant::Vector{Float32}
    net::Chain
    x0::Vector{Float32}
end

Flux.@functor VarCoeffGARCH

VarCoeffGARCH(net::Chain) = VarCoeffGARCH([-9], net, [0.0])
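Reading off the forward pass defined below, together with the caption of the final figure, the struct encodes a GARCH(1,1)-style recursion whose coefficients are re-computed by the network at every step:

\sigma^2_t = \mathrm{softplus}(\omega) + \alpha^{NN}(y_{t-1},\sigma_{t-1})\, y^2_{t-1} + \beta^{NN}(y_{t-1},\sigma_{t-1})\, \sigma^2_{t-1}

Here, constant stores the unconstrained intercept \omega (softplus keeps it positive), net maps (y_{t-1}, \sigma_{t-1}) to the varying coefficients \alpha^{NN} and \beta^{NN}, and x0 is the pre-sample value used to start the recursion.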
function garch_mean_ll(m::VarCoeffGARCH, y::Vector{Float32})::Float32
    sigmas, _ = garch_forward(m, y)
    conditional_dists = Normal.(0, sigmas)
    return mean(logpdf.(conditional_dists, y))
end
#Use a functional (recursive) implementation to calculate the conditional stddevs.
#That way we don't need to store stddev_t in an array to calculate stddev_{t+1},
#so we avoid array mutation, which Zygote cannot differentiate through
#(Zygote.Buffer would work, but is often discouraged);
#see the illustrative mutating variant after garch_forward_recurse below.
function garch_forward(m::VarCoeffGARCH, y::Vector{Float32})
    sigma_1, params_1 = m(m.x0[1], sqrt(softplus(m.constant[1])))
    sigma_rec, params_rec = garch_forward_recurse(m, sigma_1, y, 1)

    sigmas_result = vcat(sigma_1, sigma_rec)
    params_result = hcat(params_1, params_rec)
    return sigmas_result, params_result
end
function garch_forward_recurse(m::VarCoeffGARCH, sigma_tm1::Float32, y::Vector{Float32}, t::Int64)
    sigma_t, params_t = m(y[t], sigma_tm1)

    if t == length(y)-1
        return sigma_t, params_t
    end

    sigma_rec, params_rec = garch_forward_recurse(m, sigma_t, y, t+1)
    sigmas_result = vcat(sigma_t, sigma_rec)
    params_result = hcat(params_t, params_rec)
    return sigmas_result, params_result
end
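For contrast, here is a sketch (not used anywhere in this notebook) of the more obvious loop-based variant. It mutates a pre-allocated vector in place, which Zygote's reverse-mode AD rejects, hence the recursive formulation above:

#Illustrative only - Zygote.gradient would fail on the in-place writes below
#with a "Mutating arrays is not supported" style error.
function garch_forward_mutating(m::VarCoeffGARCH, y::Vector{Float32})
    sigmas = Vector{Float32}(undef, length(y))
    sigmas[1], _ = m(m.x0[1], sqrt(softplus(m.constant[1])))
    for t in 2:length(y)
        sigmas[t], _ = m(y[t-1], sigmas[t-1])
    end
    return sigmas
end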
function (m::VarCoeffGARCH)(y::Float32, sigma::Float32)
    input_vec = vcat(y, sigma)
    params = m.net(input_vec)
    params_stable = get_garch_stable_params(params) #to ensure stationarity of the resulting GARCH process

    return sqrt(softplus(m.constant[1]) + sum(input_vec.^2 .* params_stable)), params_stable
end
#transform both parameters to be >0 each and their sum to be <1
get_garch_stable_params(x::Vector{Float32}) = vcat(σ(x[1]), (1-σ(x[1]))*σ(x[2]))
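A quick numerical check of that constraint (σ here is Flux's logistic sigmoid), added only for illustration:

get_garch_stable_params(Float32[0.0, 0.0])    #≈ [0.5, 0.25]; both > 0 and sum = 0.75 < 1
get_garch_stable_params(Float32[10.0, 10.0])  #approaches [1.0, 0.0]; the sum stays below 1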
using Random, Zygote
Random.seed!(123)
model = VarCoeffGARCH(Chain(Dense(2,2,softplus), Dense(2,2,softplus), Dense(2,2)))
params = Flux.params(model)
opt = ADAM(0.05)
for i in 1:500
    grads = Zygote.gradient(() -> -garch_mean_ll(model, train), params)
    Flux.Optimise.update!(opt, params, grads)

    if i%50 == 0
        println(garch_mean_ll(model, train))
    end
end
3.038939
3.0580537
3.0621195
3.0658002
3.069259
3.0699775
3.0751026
3.0757551
3.0642645
3.080713
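The printed values are the training-set mean log-likelihood every 50 iterations, so they should generally increase, as they do above. As an aside, the implicit Flux.params/Zygote.gradient style used here is deprecated in recent Flux releases; a rough sketch of the same loop with the explicit-parameter API (assuming Flux ≥ 0.14) would be:

#Sketch only - not run in this notebook; assumes Flux's explicit-gradient API
opt_state = Flux.setup(Adam(0.05), model)
for i in 1:500
    grads = Flux.gradient(m -> -garch_mean_ll(m, train), model)
    Flux.update!(opt_state, model, grads[1])
end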
sigmas, params = garch_forward(model, train)

lower = quantile.(Normal.(0, sigmas), 0.05)
upper = quantile.(Normal.(0, sigmas), 0.95)
plot(train_idx, train, label="^GDAXI daily returns", size = (1200,600), title="In-Sample predictions", fmt=:png)
plot!(train_idx, zeros(length(lower)), ribbon=(upper,-lower),label = "90% CI")
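As a quick check on these in-sample intervals, the empirical coverage of the 90% band can be compared with its nominal level (again just a sanity check, not part of the original analysis):

#Fraction of training returns inside the 5%-95% band; should be close to 0.9
mean(lower .< train .< upper)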
function garch_forward_sample(m::VarCoeffGARCH, sigma_tm1::Float32, y_tm1::Float32, t::Int64, T::Int64=100)
    sigma_t, params_t = m(y_tm1, sigma_tm1)
    sample_t = randn(Float32)*sigma_t

    if t == T
        return sigma_t, sample_t, params_t
    end

    sigma_rec, sample_rec, params_rec = garch_forward_sample(m, sigma_t, sample_t, t+1, T)
    sigmas_result = vcat(sigma_t, sigma_rec)
    sample_result = vcat(sample_t, sample_rec)
    params_result = vcat(params_t, params_rec)
    return sigmas_result, sample_result, params_result
end
Random.seed!(123)
mc_simulation = [garch_forward_sample(model, sigmas[end], train[end], 1) for _ in 1:25000]

sigma_sample = hcat(map(x -> x[1], mc_simulation)...)
y_forecast_sample = hcat(map(x -> x[2], mc_simulation)...)
params1_sample = hcat(map(x -> x[3], mc_simulation)...)
params2_sample = hcat(map(x -> x[3], mc_simulation)...)

y_forecast_mean = mean(y_forecast_sample, dims=2)[:]
y_forecast_lower = mapslices(x -> quantile(x, 0.05), y_forecast_sample, dims=2)[:]
y_forecast_upper = mapslices(x -> quantile(x, 0.95), y_forecast_sample, dims=2)[:]
plot(test[1:100], size = (1200,600), title = "100 steps ahead forecast", label="Test set", fmt=:png)
plot!(y_forecast_mean, ribbon = (y_forecast_upper.-y_forecast_mean, y_forecast_mean.-y_forecast_lower), label="Forecast and 90% CI")
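The same kind of coverage check works out of sample, comparing the realised test returns with the simulated 90% band (added here as a sanity check):

#Share of the 100 test returns falling inside the forecast band; near 0.9 is good
mean(y_forecast_lower .< test[1:100] .< y_forecast_upper)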
using ARCHModels
garch_model = fit(GARCH{1,1}, train)
garch_model_dummy = fit(GARCH{1,1}, train[1:end-1]) #to get latent variance of final training observation
Random.seed!(123)
var_T = predict(garch_model_dummy, :variance, 1)
y_T = train[end]

garch_coefs = garch_model.spec.coefs
mean_coef = garch_model.meanspec.coefs[1]

garch_sigma_sample = zeros(100, 25000)
garch_forecast_sample = zeros(100, 25000)
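The indexing in the simulation below relies on ARCHModels.jl storing the GARCH{1,1} coefficients in the order \omega, \beta_1, \alpha_1 (constant, lagged variance, lagged squared innovation), so that each path follows \sigma^2_t = \omega + \beta_1\,\sigma^2_{t-1} + \alpha_1\,(y_{t-1}-\mu)^2. ARCHModels follows the StatsBase interface, so (assuming the coeftable method is available for this model type) the ordering can be confirmed directly from the fitted model:

coeftable(garch_model)  #assumed to list ω, β₁, α₁ (plus the mean μ) next to their estimates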
for i in 1:25000
    sigma_1 = sqrt(garch_coefs[1] + garch_coefs[2]*var_T + garch_coefs[3]*(y_T-mean_coef)^2)
    garch_sigma_sample[1,i] = sigma_1

    forecast_sample = randn()*sigma_1 + mean_coef
    garch_forecast_sample[1,i] = forecast_sample

    for t in 2:100
        var_tm1 = garch_sigma_sample[t-1,i]^2
        eps_tm1 = (garch_forecast_sample[t-1,i]-mean_coef)^2

        sigma_t = sqrt(garch_coefs[1] + garch_coefs[2]*var_tm1 + garch_coefs[3]*eps_tm1)
        garch_sigma_sample[t,i] = sigma_t

        forecast_sample = randn()*sigma_t + mean_coef
        garch_forecast_sample[t,i] = forecast_sample
    end
end
garch_forecast_mean = mean(garch_forecast_sample, dims=2)[:]
garch_forecast_lower = mapslices(x -> quantile(x, 0.05), garch_forecast_sample, dims=2)[:]
garch_forecast_upper = mapslices(x -> quantile(x, 0.95), garch_forecast_sample, dims=2)[:]
plot(test[1:100], size = (1200,600), title = "100 steps ahead forecast", label="Test set", fmt=:png)
plot!(y_forecast_mean, ribbon = (y_forecast_upper.-y_forecast_mean, y_forecast_mean.-y_forecast_lower), label="VarCoef GARCH forecast")
plot!(garch_forecast_mean,
    ribbon = (garch_forecast_upper.-garch_forecast_mean, garch_forecast_mean.-garch_forecast_lower),
    label = "Standard GARCH forecast", alpha=0.5)
using KernelDensity
var_coef_ll = mean([log(pdf(kde(y_forecast_sample[t,:]), test[t])) for t in 1:100])
standard_ll = mean([log(pdf(kde(garch_forecast_sample[t,:]), test[t])) for t in 1:100])
println(var_coef_ll)
println(standard_ll)
3.012266797283187
3.0003596946242705
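To spell out what each comprehension term does: kde fits a kernel density estimate to the 25,000 simulated returns at horizon t, and pdf evaluates that estimated density at the realised test return, giving a Monte-Carlo log-score per horizon. For a single horizon this looks like:

k1 = kde(y_forecast_sample[1,:])  #density estimate of the 1-step-ahead forecast distribution
pdf(k1, test[1])                  #estimated density at the realised return
log(pdf(k1, test[1]))             #the corresponding log-score term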
using LaTeXStrings
title = plot(title = "Varying coefficient GARCH: "*L"\sigma^2_t=\omega + \alpha^{NN}(y_{t-1},\sigma_{t-1})y^2_{t-1}+\beta^{NN}(y_{t-1},\sigma_{t-1})\sigma^2_{t-1}", grid = false, showaxis = false)

p1 = scatter(train[1:end-1], params[1,2:end], label=:none, guidefontsize=15)
xlabel!(p1, L"y_{t-1}")
ylabel!(p1, L"\alpha_t")
hline!([garch_coefs[3]], color=:red, label="Parameter in GARCH model")

p2 = scatter(train[1:end-1], params[2,2:end], label=:none, guidefontsize=15)
xlabel!(p2, L"y_{t-1}")
ylabel!(p2, L"\beta_t")
hline!([garch_coefs[2]], color=:red, label="Parameter in GARCH model")
plot(title, p1, p2, layout = @layout([A{0.01h}; [B C]]), size = (1200,600), left_margin=10*Plots.mm, bottom_margin=5*Plots.mm,fmt=:png)