In [295]:
using Dierckx
using ParameterizedFunctions
using Distributions, Compat
using StatsBase
using MultivariateStats
using StaticArrays, BenchmarkTools
using DifferentialEquations
using LinearAlgebra
using Plots
using Images
using Random
using ODE
rng = MersenneTwister(1234);
$$ m \ddot{x} + c \dot{x} + k x = \mu $$
$$ \ddot{x} + 2 \eta \omega_0 \dot{x} + \omega_0^2 x = \mu $$

natural frequency: $$ \omega_0 = \sqrt{\frac{k}{m}} $$ damping ratio: $$ \eta_0 = \frac{c}{2\sqrt{km}} $$ Nonlinear mass-damper system:

$$ \begin{bmatrix} \dot{x} \\ \ddot{x} \end{bmatrix} = \begin{bmatrix} 0 & 1 \\ -\omega_0^2 & -2 \eta \omega_0 \end{bmatrix} \begin{bmatrix} x \\ \dot{x} \end{bmatrix} + \begin{bmatrix} 0 \\ 1 \end{bmatrix} \hat{\mu} $$
$$ \dot{z} = A z + \beta \mu $$

Euler:

$$ \frac{z_{n+1}-z_n}{\delta t} = A z_n + \beta \mu $$
Taking the forcing term $\beta \mu = 0$:
$$ z_{n+1} = z_n + A z_n \, \delta t $$
$$ z_{n+1} = (I + A \, \delta t)\, z_n $$
$$ z_{n+1} = W z_n + b $$

Nonlinear system: $$ z_{n+1} = \tanh(W z_n + b) $$

6 parameters:

$$ W = \begin{bmatrix} w_{11} & w_{12} \\ w_{21} & w_{22} \end{bmatrix} $$
$$ b = \begin{bmatrix} b_{1} \\ b_{2} \end{bmatrix} $$
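As a sanity check of the Euler discretization above, here is a minimal sketch that builds the continuous-time matrix $A$, forms $W = I + A\,\delta t$, and advances the linear state a few steps. The constants and the helper names euler_step and propagate are illustrative, not part of the notebook's own cells.

using LinearAlgebra

# illustrative constants for the linear spring-mass-damper
m, c, k, dt = 2.0, 0.5, 2.0, 0.01

A = [0.0 1.0; -k/m -c/m]    # continuous-time state matrix for z = [x, dx/dt]
W = I + A*dt                # Euler propagator
b = [0.0, 0.0]              # no forcing / bias

euler_step(z) = W*z + b                                  # z_{n+1} = W z_n + b
propagate(z0, n) = foldl((z, _) -> euler_step(z), 1:n; init=z0)

z3 = propagate([1.0, 1.0], 3)   # advance the initial state [x, dx/dt] three steps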

Loop Steps

1- True system with noise

2- Propagate with the true alpha

3- Get the parameter ensemble

4- Propagate with the guessed alphas

5- Get the error on performance (and on the parameters, for reference)

6- Get $C_{ax}$ and $C_{xx}$

7- Do the ensemble step (error correction)

8- Update all alphas, or only those with the most impact, selected by information gain (mutual information) or Fisher information; a minimal sketch of steps 3-7 follows this list.
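The sketch below illustrates steps 3-7 on a scalar toy problem with the linear model y = a*x. All names here (a_true, n_ens, c_ay, c_yy) are illustrative and not variables from the notebook; the correction line is the analogue of the cax*pinv(cxx)*errens update used later.

using Statistics

a_true = 2.0                               # true parameter
n_ens  = 100
a_ens  = a_true .+ 0.5 .* randn(n_ens)     # step 3: guessed parameter ensemble

x      = 1.0 + 0.1*randn()                 # step 1: noisy state
y_true = a_true * x                        # step 2: truth
y_ens  = a_ens .* x                        # step 4: propagate each ensemble member

err    = y_true .- y_ens                   # step 5: error on performance

c_ay = cov(a_ens, y_ens)                   # step 6: parameter-output covariance
c_yy = var(y_ens)                          #         output variance

a_ens = a_ens .+ (c_ay / c_yy) .* err      # step 7: ensemble correction step

For this linear toy model the correction recovers a_true in a single step; the nonlinear tanh model in the notebook needs repeated iterations.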

In [296]:
function DampO(xnp,par)
    xnp = xnp[:];
    W = reshape(par[1:4],(2, 2));
    b = par[5:6];
    #discrete nonlinear spring-mass
    xosav = tanh.((W*xnp+b)/10.0); # nonlinear second-order system (tanh squashing, scaled by 1/10)
    return xosav
end
Out[296]:
DampO (generic function with 1 method)
In [297]:
m = 2; #mass
c = 0.5; #damper
k = 2; # spring constant
dt = 0.01; #time step
om0 = sqrt(k/m); # natural frequency
eta0 = c/(2*sqrt(m*k)); # damping ratio

eta0 = 0.01; # override: make it a lightly damped (nearly undamped) oscillator

W = (zeros(2,2)+I)+[0 1; -om0^2 -eta0*om0]*dt; #Euler scheme
x0 = [1; 1]; #initial condition
b = [0; 0]; # no biases
par = [W[:]; b]; # parameters
In [298]:
alph0 = [W[:]; b];
#Guess a parameter ensemble
II = (zeros(2,2)+I);
alph1 = [II[:]; rand(1); rand(1)] .+ randn(length(alph0),100) .*10
alphst = mean(alph1,dims=2)
Out[298]:
6×1 Array{Float64,2}:
  0.9666156727657602
 -0.8669526482302405
  0.4976694691181926
  1.585899966589858 
 -1.0854000181336485
  1.7378041236310586
In [299]:
merr = Inf;
runT = 0; 
cxx = (zeros(2,2)+I);
mderr = Inf;

Ensemble

In [300]:
xensout =zeros(2,size(alph1)[2]) # TODO: generalize so the state dimension is not hard-coded to 2
nused=zeros(1000)
anim=Animation()
while (runT<1000 && mderr>1e-6)
    runT = runT + 1;
    
    #True system
    xthis = x0+randn(2,1);
    temp =DampO(xthis,alph0);
    xtrue = temp[:,end];  #+randn(2,1)*1e-12;
    #ensemble system
    xens = repeat(xthis,1 ,100);#+randn(2,100)*1e-16;
    for ens = 1:size(alph1)[2]
        temp =DampO(xens[:,ens],alph1[:,ens]);
        xensout[:,ens] = temp[:,end];
    end
    # Error truth
    merr = mean(abs.(mean(alph1,dims=2)-alph0));
    # Error on performance not on parameter
    derr = (xtrue .- xensout);
    mderr = (mean(abs.(derr[:])));


    errens = xtrue .- xensout;
    cax = (alph1 .- mean(alph1,dims=2))*(xensout .- mean(xensout,dims=2))'/99;
    cxx = cov(xensout');
    #Need to make this  efficient
    # note -- not the enkf update
    incrmnt = cax*pinv(cxx) *errens;
    sel = ones(6,1);
    #sel = FisherInfSel(alph1,exper);
    #sel = FisherInfSel(alph1);
    # sel = MutInfSel(alph1,errens);
    alph1 = alph1+ sel.*incrmnt;
    nused[runT] = sum(sel);
    
    
    #graph
    ss=fill(3.0,1,102)
    ss[1,1]=5.0
    ss[1,2]=3.0

    sh=fill(:circle,1,102)
    sh[1,1]=:hexagon
    sh[1,2]=:diamond

    ma=fill(0.3,1,102)
    ma[1,1]=1.0
    ma[1,2]=1.0

    l = @layout [a b]
    p1 = bar(abs.(mean(alph1,dims=2)-alph0),xlabel="parameters",ylabel="Absolute Error",title = "Iter: $runT convergence",titlefont = font(10),label="",ylim=(0,3.0))
    p2 = scatter([alph0,alphst,alph1],label="",xlabel="parameters",ylabel="Solution Ensemble",markersize=ss,markershape=sh,titlefont = font(10),title = "Ensemble Parameter Estimation",markeralpha=ma,ylim=(-0.5,3))
    plot(p1, p2, layout = l)
    frame(anim)
end
nn=sum(nused)
println("number of unpdated parameters= $nn")
gif(anim,fps=2)
number of updated parameters= 90.0
┌ Info: Saved animation to 
│   fn = C:\Users\amira\Dropbox (MIT)\CBA\_Classes\4_Spring 2020\Machine Learning System Optimization\Project\tmp.gif
└ @ Plots C:\Users\amira\.julia\packages\Plots\cc8wh\src\animation.jl:98
Out[300]:

Fisher information

In [180]:
function FisherInfSel(alph1,varargin)

    # variance of each parameter across the ensemble
    valph = var(alph1',dims=1)';
    # sort parameters by variance, in descending order
    vi = sortperm(valph[:], rev=true)
    vs = sort(valph, rev=true, dims=1);

    nn = length(vs); svs = sum(vs,dims=1);

    # Estimated cost of including the n highest-variance parameters
    # (computed for reference only; here the count n is supplied by the caller)
    vsum = (svs .- cumsum(vs,dims=1)) ./ svs .+ (1:nn)'/nn;
    n = varargin   # number of parameters to update

    sel = zeros(6,1);
    sel[vi[1:n]] .= 1;   # select the n highest-variance parameters

    return sel
end

function FisherInfSel(alph1)

    # variance of each parameter across the ensemble
    valph = var(alph1',dims=1)';
    # sort parameters by variance, in descending order
    vi = sortperm(valph[:], rev=true)
    vs = sort(valph, rev=true, dims=1);

    nn = length(vs); svs = sum(vs,dims=1);

    # Estimated cost of including the n highest-variance parameters:
    # variance left out plus a penalty proportional to n
    vsum = (svs .- cumsum(vs,dims=1)) ./ svs .+ (1:nn)'/nn;

    # pick the count n with minimum "energy"
    n = argmin(vsum[:]);

    sel = zeros(6,1);
    sel[vi[1:n]] .= 1;   # select the n highest-variance parameters

    return sel
end
Out[180]:
FisherInfSel (generic function with 2 methods)
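For reference, a hedged usage sketch of the two methods above on a stand-in ensemble (demo_ens is an illustrative name; it assumes the cell above has been run):

demo_ens  = randn(6, 100)              # stand-in 6-parameter ensemble
sel_fixed = FisherInfSel(demo_ens, 2)  # keep the 2 highest-variance parameters
sel_auto  = FisherInfSel(demo_ens)     # let the variance/cost trade-off pick the count

Both calls return a 6x1 selection vector of ones and zeros that gates which parameters receive the ensemble increment.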
In [194]:
runiter=zeros(6)
nn=zeros(6)
anim=Animation()

for exper = 1:6
    alph0 = [W[:]; b];
    #Guess a parameter ensemble
    II = (zeros(2,2)+I);
    alph1 = [II[:]; rand(1); rand(1)] .+ randn(length(alph0),100) .*10
    alphst = mean(alph1,dims=2)
    
    xensout =zeros(2,size(alph1)[2]) # TODO: generalize so the state dimension is not hard-coded to 2
    nused=zeros(1000)
    merr = Inf;
    runT = 0; 
    cxx = (zeros(2,2)+I);
    mderr = Inf;
    
    while (runT<1000 && mderr>1e-6)
        runT = runT + 1;

        #True system
        xthis = x0+randn(2,1);
        temp =DampO(xthis,alph0);
        xtrue = temp[:,end];  #+randn(2,1)*1e-12;
        #ensemble system
        xens = repeat(xthis,1 ,100);#+randn(2,100)*1e-16;
        for ens = 1:size(alph1)[2]
            temp =DampO(xens[:,ens],alph1[:,ens]);
            xensout[:,ens] = temp[:,end];
        end
        # Error truth
        merr = mean(abs.(mean(alph1,dims=2)-alph0));
        # Error on performance not on parameter
        derr = (xtrue .- xensout);
        mderr = (mean(abs.(derr[:])));


        errens = xtrue .- xensout;
        cax = (alph1 .- mean(alph1,dims=2))*(xensout .- mean(xensout,dims=2))'/99;
        cxx = cov(xensout');
        #Need to make this  efficient
        # note -- not the enkf update
        incrmnt = cax*pinv(cxx) *errens;
        
        sel = ones(6,1);
        sel = FisherInfSel(alph1,exper);
        #sel = FisherInfSel(alph1);
        # sel = MutInfSel(alph1,errens);
        alph1 = alph1+ sel.*incrmnt;
        nused[runT] = sum(sel);


        #graph
        ss=fill(3.0,1,102)
        ss[1,1]=5.0
        ss[1,2]=3.0

        sh=fill(:circle,1,102)
        sh[1,1]=:hexagon
        sh[1,2]=:diamond

        ma=fill(0.3,1,102)
        ma[1,1]=1.0
        ma[1,2]=1.0

        l = @layout [a b]
        p1 = bar(abs.(mean(alph1,dims=2)-alph0),xlabel="parameters",ylabel="Absolute Error",title = "$exper parameters, Iter: $runT convergence",titlefont = font(10),label="",ylim=(0,3.0))
        p2 = scatter([alph0,alphst,alph1],label="",xlabel="parameters",ylabel="Solution Ensemble",markersize=ss,markershape=sh,titlefont = font(10),title = "Ensemble Parameter Estimation",markeralpha=ma,ylim=(-0.5,3))
        plot(p1, p2, layout = l)
        frame(anim)
    end
    nn[exper]=sum(nused)
    runiter[exper] = runT;
end
println("number of updated parameters= $nn")
println("number of Iterations= $runiter")
gif(anim,fps=10)
number of updated parameters= [150.0, 142.0, 90.0, 100.0, 75.0, 102.0]
number of Iterations= [150.0, 71.0, 30.0, 25.0, 15.0, 17.0]
┌ Info: Saved animation to 
│   fn = C:\Users\amira\Dropbox (MIT)\CBA\_Classes\4_Spring 2020\Machine Learning System Optimization\Project\tmp.gif
└ @ Plots C:\Users\amira\.julia\packages\Plots\cc8wh\src\animation.jl:98
Out[194]:
In [192]:
plot(1:6,nn,label="number of parameter updates")
plot!(1:6,runiter,label="number of iterations")
Out[192]:
[Plot: number of parameter updates and number of iterations vs. number of parameters selected (1-6)]
In [182]:
alph0 = [W[:]; b];
#Guess a parameter ensemble
II = (zeros(2,2)+I);
alph1 = [II[:]; rand(1); rand(1)] .+ randn(length(alph0),100) .*10
alphst = mean(alph1,dims=2)

merr = Inf;
runT = 0; 
cxx = (zeros(2,2)+I);
mderr = Inf;

runiter=zeros(6)
anim=Animation()
# for exper = 1:6
    xensout =zeros(2,size(alph1)[2]) # TODO: generalize so the state dimension is not hard-coded to 2
    nused=zeros(1000)
    
    while (runT<1000 && mderr>1e-6)
        runT = runT + 1;

        #True system
        xthis = x0+randn(2,1);
        temp =DampO(xthis,alph0);
        xtrue = temp[:,end];  #+randn(2,1)*1e-12;
        #ensemble system
        xens = repeat(xthis,1 ,100);#+randn(2,100)*1e-16;
        for ens = 1:size(alph1)[2]
            temp =DampO(xens[:,ens],alph1[:,ens]);
            xensout[:,ens] = temp[:,end];
        end
        # Error truth
        merr = mean(abs.(mean(alph1,dims=2)-alph0));
        # Error on performance not on parameter
        derr = (xtrue .- xensout);
        mderr = (mean(abs.(derr[:])));


        errens = xtrue .- xensout;
        cax = (alph1 .- mean(alph1,dims=2))*(xensout .- mean(xensout,dims=2))'/99;
        cxx = cov(xensout');
        #Need to make this  efficient
        # note -- not the enkf update
        incrmnt = cax*pinv(cxx) *errens;
        
        sel = ones(6,1);
#         sel = FisherInfSel(alph1,exper);
        sel = FisherInfSel(alph1);
        # sel = MutInfSel(alph1,errens);
        alph1 = alph1+ sel.*incrmnt;
        nused[runT] = sum(sel);


        #graph
        ss=fill(3.0,1,102)
        ss[1,1]=5.0
        ss[1,2]=3.0

        sh=fill(:circle,1,102)
        sh[1,1]=:hexagon
        sh[1,2]=:diamond

        ma=fill(0.3,1,102)
        ma[1,1]=1.0
        ma[1,2]=1.0

        l = @layout [a b]
        p1 = bar(abs.(mean(alph1,dims=2)-alph0),xlabel="parameters",ylabel="Absolute Error",title = "Iter: $runT convergence",titlefont = font(10),label="",ylim=(0,3.0))
        p2 = scatter([alph0,alphst,alph1],label="",xlabel="parameters",ylabel="Solution Ensemble",markersize=ss,markershape=sh,titlefont = font(10),title = "Ensemble Parameter Estimation",markeralpha=ma,ylim=(-0.5,3))
        plot(p1, p2, layout = l)
        frame(anim)
    end
#     runiter[exper] = runT;
# end
nn=sum(nused)
println("number of updated parameters= $nn")
gif(anim,fps=2)
number of updated parameters= 84.0
┌ Info: Saved animation to 
│   fn = C:\Users\amira\Dropbox (MIT)\CBA\_Classes\4_Spring 2020\Machine Learning System Optimization\Project\tmp.gif
└ @ Plots C:\Users\amira\.julia\packages\Plots\cc8wh\src\animation.jl:98
Out[182]:

Mutual information

$$ I(x,y) = \sum_y \sum_x p(x,y) \log\frac{p(x,y)}{p(x)\,p(y)} $$
$$ I(x,y) = -\frac{1}{2} \ln(1-\rho^2) $$
$$ \rho = \frac{C_{ae}}{\sqrt{\sigma_p^2 \sigma_e^2}} $$
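A minimal sketch of this Gaussian mutual-information estimate on synthetic correlated samples (a_samp and e_samp are illustrative names, not variables from the notebook):

using Statistics

a_samp = randn(1000)                          # stand-in parameter samples
e_samp = 0.8 .* a_samp .+ 0.6 .* randn(1000)  # errors correlated with the parameter

rho  = cor(a_samp, e_samp)            # rho = C_ae / sqrt(sigma_a^2 * sigma_e^2)
I_ae = -0.5 * log(1 - rho^2)          # I = -1/2 ln(1 - rho^2), in nats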
In [272]:
function MutInfSel(parens,errens)

    # correlation between each parameter and each error component
    dparens = parens .- mean(parens,dims=2);
    cae = (dparens * errens') ./ 100.0;     # parameter-error covariance C_ae
    evar = mean(errens.^2, dims=2);         # error second moment
    pvar = var(parens, dims=2);             # parameter variance
    rh = cae ./ sqrt.(pvar .* evar');       # correlation coefficient rho

    # Gaussian mutual information: I = -1/2 ln(1 - rho^2)
    mutinf = -0.5 .* log.(1.0 .- rh.^2);

    # rank parameters by their maximum mutual information with the error
    valph = maximum(mutinf, dims=2)[:]
    vi = sortperm(valph, rev=true)
    vs = sort(valph, rev=true);

    # cost of including the n most informative parameters:
    # information left out plus a penalty proportional to n
    nn = length(vi);
    svs = sum(vs);
    vsum = ((svs .- cumsum(vs)) ./ svs) .+ ((1.0:nn) ./ nn);

    # pick the count n with minimum cost and select those parameters
    n = argmin(vsum[:]);
    sel = zeros(6,1);
    sel[vi[1:n]] .= 1

    return sel
end
# MutInfSel(alph1,errens)
Out[272]:
MutInfSel (generic function with 1 method)
In [270]:
valph=[6 3 5 1]
vi=sortperm(valph[:], rev=true)
vs = sort(valph, rev=true,dims=1);
display(vs)

nn = length(vi);
svs = sum(vs);
display(svs)
vsum = ((svs .-cumsum(vs,dims=1))./svs) .+((1.0:nn) ./nn);


n =argmin(vsum[:]);
sel = zeros(6,1); 
sel[vi[1:n]] .= 1
sel
1×4 Array{Int64,2}:
 6  3  5  1
15
Out[270]:
6×1 Array{Float64,2}:
 1.0
 0.0
 0.0
 0.0
 0.0
 0.0
In [292]:
alph0 = [W[:]; b];

#Guess a parameter ensemble
II = (zeros(2,2)+I);
alph1 = [II[:]; rand(1); rand(1)] .+ randn(length(alph0),100) .*10
alphst = mean(alph1,dims=2)
errens=[]
merr = Inf;
runT = 0; 
cxx = (zeros(2,2)+I);
mderr = Inf;
mutinf=0
runiter=zeros(6)
# for exper = 1:6
xensout =zeros(2,size(alph1)[2]) # TODO: generalize so the state dimension is not hard-coded to 2

sel=0

for i in 1:16
    runT = runT + 1;

    #True system
    xthis = x0+randn(2,1);
    temp =DampO(xthis,alph0);
    xtrue = temp[:,end];  #+randn(2,1)*1e-12;
    #ensemble system
    xens = repeat(xthis,1 ,100);#+randn(2,100)*1e-16;
    for ens = 1:size(alph1)[2]
        temp =DampO(xens[:,ens],alph1[:,ens]);
        xensout[:,ens] = temp[:,end];
    end
    # Error truth
    merr = mean(abs.(mean(alph1,dims=2)-alph0));
    # Error on performance not on parameter
    derr = (xtrue .- xensout);
    mderr = (mean(abs.(derr[:])));


    errens = xtrue .- xensout;
    cax = (alph1 .- mean(alph1,dims=2))*(xensout .- mean(xensout,dims=2))'/99;
    cxx = cov(xensout');
    #Need to make this  efficient
    incrmnt = cax*pinv(cxx) *errens;


    sel = MutInfSel(alph1,errens);
    
    dparens = alph1 .- mean(alph1,dims=2);
    cae = (dparens * errens') ./100.0;
    evar = mean(errens.^2,dims=2);
    pvar = var(alph1,dims=2);
    rh = cae./sqrt.((pvar .*evar'));
    mutinf = -0.5 .*log.(1.0 .- rh.^2);
    

    
    alph1 = alph1+ sel.*incrmnt;
end
display(sel)
display(mutinf)
mutinf=round.(mutinf,digits=4)

errens
l = @layout [a b c;d e f]
p1 = scatter(alph1[1,:],errens[1,:],errens[2,:],label="",title="p1",xlabel="parameter value",ylabel="error",titlefont = font(10))
p2 = scatter(alph1[2,:],errens[1,:],errens[2,:],label="",title="p2",xlabel="parameter value",ylabel="error",titlefont = font(10))
p3 = scatter(alph1[3,:],errens[1,:],errens[2,:],label="",title="p3",xlabel="parameter value",ylabel="error",titlefont = font(10))
p4 = scatter(alph1[4,:],errens[1,:],errens[2,:],label="",title="p4",xlabel="parameter value",ylabel="error",titlefont = font(10))
p5 = scatter(alph1[5,:],errens[1,:],errens[2,:],label="",title="p5",xlabel="parameter value",ylabel="error",titlefont = font(10))
p6 = scatter(alph1[6,:],errens[1,:],errens[2,:],label="",title="p6",xlabel="parameter value",ylabel="error",titlefont = font(10))
display(plot(p1, p2,p3,p4,p5,p6, layout = l))

l = @layout [a b c;d e f]
p1 = scatter(alph1[1,:],errens[1,:],label="",title="e1 p1 $(mutinf[1,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p2 = scatter(alph1[2,:],errens[1,:],label="",title="e1 p2 $(mutinf[2,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p3 = scatter(alph1[3,:],errens[1,:],label="",title="e1 p3 $(mutinf[3,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p4 = scatter(alph1[4,:],errens[1,:],label="",title="e1 p4 $(mutinf[4,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p5 = scatter(alph1[5,:],errens[1,:],label="",title="e1 p5 $(mutinf[5,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p6 = scatter(alph1[6,:],errens[1,:],label="",title="e1 p6 $(mutinf[6,1])",xlabel="parameter value",ylabel="error",titlefont = font(10))
display(plot(p1, p2,p3,p4,p5,p6, layout = l))

l = @layout [a b c;d e f]
p1 = scatter(alph1[1,:],errens[2,:],label="",title="e2 p1 $(mutinf[1,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p2 = scatter(alph1[2,:],errens[2,:],label="",title="e2 p2 $(mutinf[2,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p3 = scatter(alph1[3,:],errens[2,:],label="",title="e2 p3 $(mutinf[3,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p4 = scatter(alph1[4,:],errens[2,:],label="",title="e2 p4 $(mutinf[4,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p5 = scatter(alph1[5,:],errens[2,:],label="",title="e2 p5 $(mutinf[5,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
p6 = scatter(alph1[6,:],errens[2,:],label="",title="e2 p6 $(mutinf[6,2])",xlabel="parameter value",ylabel="error",titlefont = font(10))
display(plot(p1, p2,p3,p4,p5,p6, layout = l))

# l = @layout [a b c;d e f;g h i;j k l]
# p1 = scatter(alph1[1,:],errens[1,:],label="",title="e1 p1",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p2 = scatter(alph1[2,:],errens[1,:],label="",title="e1 p2",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p3 = scatter(alph1[3,:],errens[1,:],label="",title="e1 p3",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p4 = scatter(alph1[4,:],errens[1,:],label="",title="e1 p4",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p5 = scatter(alph1[5,:],errens[1,:],label="",title="e1 p5",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p6 = scatter(alph1[6,:],errens[1,:],label="",title="e1 p6",xlabel="parameter value",ylabel="error",titlefont = font(10))

# p7 = scatter(alph1[1,:],errens[2,:],label="",title="e2 p1",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p8 = scatter(alph1[2,:],errens[2,:],label="",title="e2 p2",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p9 = scatter(alph1[3,:],errens[2,:],label="",title="e2 p3",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p10 = scatter(alph1[4,:],errens[2,:],label="",title="e2 p4",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p11 = scatter(alph1[5,:],errens[2,:],label="",title="e2 p5",xlabel="parameter value",ylabel="error",titlefont = font(10))
# p12 = scatter(alph1[6,:],errens[2,:],label="",title="e2 p6",xlabel="parameter value",ylabel="error",titlefont = font(10))
# plot(p1, p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12, layout = l)
6×1 Array{Float64,2}:
 1.0
 1.0
 0.0
 0.0
 0.0
 0.0
6×2 Array{Float64,2}:
 0.541787   0.162641 
 0.116926   0.909652 
 0.24007    0.0468857
 0.0343878  0.380257 
 0.25106    0.0486248
 0.0350905  0.390066 
[Figure: 3D scatter of each parameter's ensemble values (p1-p6) against both error components]
[Figure: each parameter vs. error component e1, with the estimated mutual information in the panel titles]
[Figure: each parameter vs. error component e2, with the estimated mutual information in the panel titles]
In [293]:
alph0 = [W[:]; b];
#Guess a parameter ensemble
II = (zeros(2,2)+I);
alph1 = [II[:]; rand(1); rand(1)] .+ randn(length(alph0),100) .*10
alphst = mean(alph1,dims=2)
errens=[]
merr = Inf;
runT = 0; 
cxx = (zeros(2,2)+I);
mderr = Inf;

runiter=zeros(6)
anim=Animation()
# for exper = 1:6
    xensout =zeros(2,size(alph1)[2]) # TODO: generalize so the state dimension is not hard-coded to 2
    nused=zeros(1000)
    
    while (runT<1000 && mderr>1e-6)
        runT = runT + 1;

        #True system
        xthis = x0+randn(2,1);
        temp =DampO(xthis,alph0);
        xtrue = temp[:,end];  #+randn(2,1)*1e-12;
        #ensemble system
        xens = repeat(xthis,1 ,100);#+randn(2,100)*1e-16;
        for ens = 1:size(alph1)[2]
            temp =DampO(xens[:,ens],alph1[:,ens]);
            xensout[:,ens] = temp[:,end];
        end
        # Error truth
        merr = mean(abs.(mean(alph1,dims=2)-alph0));
        # Error on performance not on parameter
        derr = (xtrue .- xensout);
        mderr = (mean(abs.(derr[:])));


        errens = xtrue .- xensout;
        cax = (alph1 .- mean(alph1,dims=2))*(xensout .- mean(xensout,dims=2))'/99;
        cxx = cov(xensout');
        #Need to make this  efficient
        # note -- not the enkf update
        incrmnt = cax*pinv(cxx) *errens;
        
        sel = ones(6,1);
#         sel = FisherInfSel(alph1,exper);
#         sel = FisherInfSel(alph1);
        sel = MutInfSel(alph1,errens);
        alph1 = alph1+ sel.*incrmnt;
        nused[runT] = sum(sel);


        #graph
        ss=fill(3.0,1,102)
        ss[1,1]=5.0
        ss[1,2]=3.0

        sh=fill(:circle,1,102)
        sh[1,1]=:hexagon
        sh[1,2]=:diamond

        ma=fill(0.3,1,102)
        ma[1,1]=1.0
        ma[1,2]=1.0

        l = @layout [a b]
        p1 = bar(abs.(mean(alph1,dims=2)-alph0),xlabel="parameters",ylabel="Absolute Error",title = "Iter: $runT convergence",titlefont = font(10),label="",ylim=(0,3.0))
        p2 = scatter([alph0,alphst,alph1],label="",xlabel="parameters",ylabel="Solution Ensemble",markersize=ss,markershape=sh,titlefont = font(10),title = "Ensemble Parameter Estimation",markeralpha=ma,ylim=(-0.5,3))
        plot(p1, p2, layout = l)
        frame(anim)
    end
#     runiter[exper] = runT;
# end
# println(nused)
nn=sum(nused)
println("number of unpdated parameters= $nn")
gif(anim,fps=2)
number of updated parameters= 155.0
┌ Info: Saved animation to 
│   fn = C:\Users\amira\Dropbox (MIT)\CBA\_Classes\4_Spring 2020\Machine Learning System Optimization\Project\tmp.gif
└ @ Plots C:\Users\amira\.julia\packages\Plots\cc8wh\src\animation.jl:98
Out[293]:
In [290]:
plot(nused[1:runT])
Out[290]:
[Plot: nused, the number of parameters updated at each iteration]