import numpy as np
import functools
import matplotlib.pyplot as plt
19 Example 1: One-dimensional EKI
We implement here the original iterative ensemble Kalman method as formulated in (Iglesias, Law, and Stuart 2013).
Recall the inverse problem: recover the unknown \(u\) from noisy measurements \(y\) related by
\[ y = \mathcal{G}(u) + \eta, \] where the noise \(\eta \sim \mathcal{N}(0, \Gamma).\)
Start by introducing a pseudo-time step \(h = 1/N\), and then propagate an ensemble \(\{ u_n^{(j)}\}\) of \(J\) particles (ensemble members) from “time” \(nh\) to \((n+1)h\) according to
\[ u_{n+1}^{(j)} = u_n^{(j)} + C^{up}(u_n) \left[ C^{pp}(u_n) + \frac{1}{h} \Gamma \right]^{-1} \left( y_{n+1}^{(j)} - \mathcal{G}(u_n^{(j)}) \right), \]
where
\[\begin{align} C^{pp}(u) &= \frac{1}{J-1} \sum_{j=1}^{J} \left( \mathcal{G}(u^{(j)}) - \hat{\mathcal{G}} \right) \otimes \left( \mathcal{G}(u^{(j)}) - \hat{\mathcal{G}} \right) \\ C^{up}(u) &= \frac{1}{J-1} \sum_{j=1}^{J} \left( u^{(j)} - \hat{u} \right) \otimes \left( \mathcal{G}(u^{(j)}) - \hat{\mathcal{G}} \right) \\ \hat{u} &= \frac{1}{J} \sum_{j=1}^{J} u^{(j)}, \qquad \hat{\mathcal{G}} = \frac{1}{J} \sum_{j=1}^{J} \mathcal{G}(u^{(j)}) . \end{align}\]
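For reference, these empirical quantities translate directly into NumPy. The sketch below (with hypothetical names `ensemble` and `G`, not part of the exercise code; for scalar \(u\) the outer products reduce to ordinary products) mirrors what the implementations in this section compute:

def empirical_moments(ensemble, G):
    # ensemble: length-J array of particles u^{(j)}; G: forward map (callable)
    G_u = G(ensemble)                           # forward images G(u^{(j)})
    uhat, Ghat = ensemble.mean(), G_u.mean()    # ensemble means
    # scalar C^{up} and C^{pp}: the outer products reduce to dot products
    cov_up = (ensemble - uhat) @ (G_u - Ghat) / (len(ensemble) - 1)
    cov_pp = (G_u - Ghat) @ (G_u - Ghat) / (len(ensemble) - 1)
    return uhat, Ghat, cov_up, cov_pp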
19.1 Implement the one-dimensional EKI for a linear forward operator \(\mathcal{G}\)
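For a linear scalar operator \(\mathcal{G}(u) = Gu\), the empirical covariances reduce to \(C^{up} = G C\) and \(C^{pp} = G^2 C\), with \(C\) the ensemble covariance. The implementation below additionally uses the small-\(h\) approximation
\[ \left[ C^{pp}(u_n) + \frac{1}{h} \Gamma \right]^{-1} \approx h \Gamma^{-1}, \]
so that the update simplifies to
\[ u_{n+1}^{(j)} = u_n^{(j)} + h \left( C_n + \delta \right) G \Gamma^{-1} \left( y - G u_n^{(j)} \right), \]
where the constant \(\delta\) (delt in the code) keeps the update from vanishing as the ensemble collapses, and the same unperturbed data \(y\) is used for every particle in place of \(y_{n+1}^{(j)}\).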
def eki_one_dim_lin(m_0, C_0, N, G, gamma, y, delt, h):
    # Inputs:
    # -------
    # m_0, C_0: mean value and covariance of the initial ensemble
    # N: number of iterations
    # G: one-dimensional (linear) forward operator of the model
    # gamma: covariance of the noise in the data
    # y: observed data
    # delt: offset added to the ensemble covariance in the update
    # h: discretization step
    #
    # Outputs:
    # -------
    # U: (J x N) matrix with the computed particles for each iteration
    # m: vector of length N with the mean value of the particles
    # C: vector of length N with the covariance of the particles
    m = np.zeros(N)
    C = np.zeros(N)
    U = np.zeros((J, N))   # J (ensemble size) is taken from the enclosing scope

    # Construct initial ensemble and estimators
    u_0 = np.random.normal(m_0, C_0, J)
    U[:, 0] = u_0
    m[0] = np.mean(U[:, 0])
    C[0] = (U[:, 0] - m[0]) @ (U[:, 0] - m[0]).T / (J - 1)

    for n in range(1, N):
        # Last iterate under the forward operator:
        G_u = G * U[:, n-1]
        # Small-h update for linear G (cf. the simplification above):
        U[:, n] = U[:, n-1] + h * (C[n-1] + delt) * G * (1/gamma) * (y - G_u)

        m[n] = np.mean(U[:, n])
        C[n] = (U[:, n] - m[n]) @ (U[:, n] - m[n]).T / (J - 1)

    return U, m, C
# Set parameters
J = 10
gamma = 1
m_0 = 0
C_0 = 9e-1
m_true = 0
c_true = C_0
G = 1.5
N = 10000
h = 1/100
delt = 1
# Construct data under the true parameter
u_true = np.random.normal(m_true, c_true)
y = G * u_true
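Note that the data is generated without observation noise here, even though the model assumes \(y = \mathcal{G}(u) + \eta\). A noisy variant (an assumption, not used in the rest of this example) would draw \(\eta \sim \mathcal{N}(0, \gamma)\):

# hypothetical noisy-data variant; gamma is the noise covariance, so the std is its square root
# y = G*u_true + np.random.normal(0, np.sqrt(gamma))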
U, m, c = eki_one_dim_lin(m_0, C_0, N, G, gamma, y, delt, h)
it = N
iterations = list(range(1, it + 1))
plt.xlabel('Iteration number n')
plt.ylabel('Error')
plt.loglog(iterations, np.sqrt((u_true*np.ones(N) - m)**2 / u_true**2), "r", label=r'$u^\dagger-m_n$')
plt.legend(loc="upper right")
plt.show()
it = 300
iterations = list(range(1, it + 1))
plt.xlabel('Iteration number n')
plt.ylabel('Covariance')
plt.plot(iterations, c[0:it], "r", label=r'$c_n$')
plt.plot(iterations, np.divide(np.cumsum(c[0:it]), iterations), "g", label=r'$n^{-1}\sum_{k=1}^{n} c_k$')
plt.legend(loc="upper right")
plt.show()
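Since the data is noise-free and \(G\) is linear, the exact minimizer is \(y/G = u^\dagger\); a quick sanity check (not part of the original exercise) compares the final ensemble mean against it:

# sanity check: the final mean should approach the exact solution y/G = u_true
print("final mean:", m[-1], "  exact:", y/G)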
19.2 Implement the one-dimensional EKI for an arbitrary forward operator \(\mathcal{G}\)
def eki_one_dim(m_0, C_0, N, G, gamma, y, h):
    # Inputs:
    # -------
    # m_0, C_0: mean value and covariance of the initial ensemble
    # N: number of iterations
    # G: one-dimensional forward operator of the model (a callable)
    # gamma: covariance of the noise in the data
    # y: observed data
    # h: discretization step
    #
    # Outputs:
    # -------
    # U: (J x N) matrix with the computed particles for each iteration
    # m: vector of length N with the mean value of the particles
    # C: vector of length N with the covariance of the particles
    m = np.zeros(N)
    C = np.zeros(N)
    U = np.zeros((J, N))   # J (ensemble size) is taken from the enclosing scope

    # Construct initial ensemble and estimators
    u_0 = np.random.normal(m_0, C_0, J)
    U[:, 0] = u_0
    m[0] = np.mean(U[:, 0])
    C[0] = (U[:, 0] - m[0]) @ (U[:, 0] - m[0]).T / (J - 1)

    for n in range(1, N):
        # Last iterate under the forward operator:
        G_u = G(U[:, n-1])
        uhat = np.mean(U[:, n-1])
        Ghat = np.mean(G_u)

        # Empirical covariances C^{up} and C^{pp}:
        cov_up = (U[:, n-1] - uhat) @ (G_u - Ghat).T / (J - 1)
        cov_pp = (G_u - Ghat) @ (G_u - Ghat).T / (J - 1)

        # Update with [C^{pp} + Gamma/h]^{-1} written as h/(h*C^{pp} + gamma):
        U[:, n] = U[:, n-1] + cov_up * h / (h * cov_pp + gamma) * (y - G_u)

        m[n] = np.mean(U[:, n])
        C[n] = (U[:, n] - m[n]) @ (U[:, n] - m[n]).T / (J - 1)

    return U, m, C
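As a consistency check (a sketch, not part of the original exercise), the generic solver can be run on the linear problem of 19.1 by wrapping the scalar \(G\) in a lambda; the only difference is that the full ensemble covariances replace the small-\(h\), delt-regularized update used there:

# hypothetical check: the parameters G, gamma, y, m_0, C_0, N, h from 19.1 are still in scope
U_lin, m_lin, C_lin = eki_one_dim(m_0, C_0, N, lambda u: G*u, gamma, y, h)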
# Set parameters
J = 10
r = 10
k = 10
gamma = 1
m_0 = 0
C_0 = 9e-2
m_true = 0
C_true = C_0
N = 10000
h = 1/100  # pseudo-time step
def forward_log(z, k, r, h):
    return k / (1 + np.exp(-r*k*h) * (k/z - 1))
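This is the closed-form solution of the logistic ODE \(\dot z = r z (k - z)\) with initial value \(z(0) = z\), evaluated at time \(h\):
\[ z(h) = \frac{k}{1 + e^{-rkh} \left( k/z(0) - 1 \right)}, \]
so the forward operator maps an initial population to its value one time step later.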
# Construct data under the true parameter
u_true = np.random.normal(m_true, C_true)
y = forward_log(u_true, k, r, h)
# Fix the model parameters with a partial function so that G takes only u
partial_log = functools.partial(forward_log, k=10, r=10, h=1/100)
U, m, C = eki_one_dim(m_0, C_0, N, partial_log, gamma, y, h)
it = N
iterations = list(range(1, it + 1))
plt.xlabel('Iteration number n')
plt.ylabel('Error')
plt.plot(iterations, np.sqrt((u_true*np.ones(N) - m)**2 / u_true**2), "r", label=r'$u^\dagger-m_n$')
plt.legend(loc="upper right")
plt.show()
plt.plot(iterations, u_true*np.ones(N), label="true")
plt.plot(iterations, m, label="inversion")
plt.legend()
plt.show()
it = 300
iterations = list(range(1, it + 1))
plt.xlabel('Iteration number n')
plt.ylabel('Covariance')
plt.plot(iterations, C[0:it], "r", label=r'$C_n$')
plt.plot(iterations, np.divide(np.cumsum(C[0:it]), iterations), "g", label=r'$n^{-1}\sum_{k=1}^{n} C_k$')
plt.legend(loc="upper right")
plt.show()
19.3 Conclusions
Convergence is slow and the resulting estimates are not very accurate. Both issues will be remedied by the mean-field approach.