## Backward stable algorithms for variance computations!
# The following algorithm is a tempting one to implement: it computes
# the variance of a sequence of numbers in a single pass over the data.
function badvar1(x::Vector{Float64})
    # One-pass "textbook" variance: accumulate the sum of squares (ex2)
    # and the plain sum (ex), then combine as (ex2 - ex^2/n)/(n-1).
    # NOTE: deliberately numerically unstable. When mean(x) is large
    # relative to std(x), ex2 and ex^2/n are huge, nearly equal numbers,
    # and the subtraction in the return line suffers catastrophic
    # cancellation — this is demonstrated by the experiments below.
    ex2 = 0.0
    ex = 0.0
    n = length(x)
    for i=1:n
      ex2 = ex2 + x[i]^2
      ex = ex + x[i]
    end
    # Debug output: shows how close the two quantities being subtracted are.
    @show ex2, ex^2
    return 1.0/(n-1)*(ex2 - (ex)^2/n)
end
## It seems to work
using LinearAlgebra
using Statistics
x = randn(100)
# Reference implementation: two-pass unbiased sample variance —
# center the data first, then apply the n/(n-1) correction.
basevar = x -> (length(x)/(length(x)-1))*mean((x .- mean(x)).^2)
# With mean ≈ 0 there is no cancellation, so both routines agree.
@show badvar1(x)
@show basevar(x)
##
# Shift the data so the mean (1e4) dwarfs the standard deviation (1):
# the cancellation in badvar1 starts eating significant digits.
x = randn(10000).+1e4
@show badvar1(x)
@show basevar(x)
##
# Mean 1e6: roughly 12 of Float64's ~16 significant decimal digits cancel.
x = randn(10000).+1e6
@show badvar1(x)
@show basevar(x)
##
# Mean 1e8: the subtraction ex2 - ex^2/n cancels essentially all
# significant digits, so badvar1's answer is completely wrong.
x = randn(10000).+1e8
@show badvar1(x)
@show basevar(x)
## Eek! That's the wrong value completely. This shows the routine is not
# backwards stable.
# But there is a better way to do this that keeps some extra data around.
## This is essentially what Julia does
"""
    goodvar(x::Vector{Float64})

Compute the unbiased sample variance of `x` (normalized by `n - 1`)
using Welford's single-pass update, which maintains a running mean and
a running sum of squared deviations from that mean. Unlike the naive
sum-of-squares formula, this avoids catastrophic cancellation when
`mean(x)` is large relative to `std(x)`.

Throws `ArgumentError` if `x` has fewer than two elements, since the
sample variance is undefined there.
"""
function goodvar(x::Vector{Float64})
    n = length(x)
    # Guard the boundary: the naive formula would silently return NaN/-0.0.
    n >= 2 || throw(ArgumentError("need at least two observations, got $n"))
    μ = 0.0    # running mean of the first k elements
    m2 = 0.0   # running sum of squared deviations from the current mean
    for (k, xk) in enumerate(x)
        δ = xk - μ
        μ += δ/k
        # Uses δ against the *updated* mean — this is what makes the
        # accumulated m2 exactly Σ (xᵢ - μ)² in exact arithmetic.
        m2 += δ*(xk - μ)
    end
    return m2/(n - 1)
end
# An even more extreme shift (mean 1e10); compare all three routines.
x = randn(10000).+1e10
# Statistics.var is the library's numerically stable reference.
basevar = var
@show badvar1(x)
@show basevar(x)
@show goodvar(x)
@show var(x)
##
# Naive Euclidean norm: directly accumulate Σ xᵢ² and take the square root.
# Squaring each entry first means the accumulator can overflow to Inf for
# entries near sqrt(floatmax(Float64)), even when the true norm is
# representable — compare with LinearAlgebra.norm, which rescales to avoid
# this (demonstrated below).
function norm2(x::Vector)
  acc = 0.0
  for v in x
    acc += v^2
  end
  return sqrt(acc)
end
##
# Entries near 1e300: squaring overflows Float64 (floatmax ≈ 1.8e308), so
# the naive norm2 returns Inf, while LinearAlgebra.norm rescales internally
# and returns the correct finite answer.
x = rand(10000).+1e300
@show norm2(x)
@show norm(x)