diff --git a/benchmarks/runbenchmarks.jl b/benchmarks/runbenchmarks.jl
index 9edf516..7284b30 100644
--- a/benchmarks/runbenchmarks.jl
+++ b/benchmarks/runbenchmarks.jl
@@ -23,7 +23,9 @@ for est_t in [MCMCEstimator, LSQEstimator, LinearApproxEstimator]
         xs = rand($nsamplesdict[$size])
         n = length(xs)
         corrnoise = let n = 2*n
-            1/10 * I(n) * MvNormal(zeros(n), I(n) + 1/5*hermitianpart(rand(n, n)))
+            Σ = zeros(n, n)
+            while !isposdef(Σ); Σ .= I(n) + 1/5*hermitianpart(rand(n, n)); end
+            1/10 * I(n) * MvNormal(zeros(n), Σ)
         end
         noisemodel = CorrGaussianNoiseModel(corrnoise)
         ysmeas = maybeflatten($f.(xs, [$θtrue])) .+ rand(corrnoise)
diff --git a/test/runtests.jl b/test/runtests.jl
index a26f180..4c4c503 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -92,8 +92,11 @@ end
     n = length(xs)  # because observation are two dimensional
     corrnoise = let n = 2*n
-        1/10 * I(n) * MvNormal(zeros(n), I(n) + 1/5*hermitianpart(rand(n, n)))
+        Σ = zeros(n, n)
+        while !isposdef(Σ); Σ .= I(n) + 1/5*hermitianpart(rand(n, n)); end
+        1/10 * I(n) * MvNormal(zeros(n), Σ)
     end
+
     noisemodel = CorrGaussianNoiseModel(corrnoise)
     ysmeas = reduce(vcat, f.(xs, [θtrue])) .+ rand(corrnoise)
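
For context (not part of the patch): a minimal standalone sketch of the resampling idea shared by both hunks, assuming LinearAlgebra and Distributions are loaded and Julia 1.10+ for hermitianpart. I(n) + 1/5*hermitianpart(rand(n, n)) is symmetric but not guaranteed positive definite, so the candidate covariance is redrawn until isposdef accepts it and the MvNormal constructor cannot throw; the 1/10 * I(n) scaling used in the patched files is omitted here for brevity.

using LinearAlgebra, Distributions

n = 10
Σ = zeros(n, n)                      # all-zeros fails isposdef, so the loop runs at least once
while !isposdef(Σ)                   # redraw until the candidate is positive definite
    Σ .= I(n) + 1/5 * hermitianpart(rand(n, n))
end
corrnoise = MvNormal(zeros(n), Σ)    # safe: Σ passed isposdef above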