Commit 492662b6 authored by conmccoid's avatar conmccoid
Browse files

Adding notes on Sidis proof of MPE=Arnoldi

parent e89b60b4
......@@ -20,6 +20,7 @@
\newcommand{\Set}[2]{\left \{ #1 \ \middle \vert \ #2 \right \}}
\newcommand{\vmat}[1]{\begin{vmatrix} #1 \end{vmatrix}}
\DeclareMathOperator{\sign}{sign}
\DeclareMathOperator{\Span}{span}
\newcommand{\bbn}{\mathbb{N}}
\newcommand{\bbz}{\mathbb{Z}}
......@@ -143,7 +144,7 @@ T_n = S \ \forall n \in \bbn \iff (S_n) \in \kernel.
(Somehow this already defines the kernel of the transformation; is this definition then vacuous?
It is stated in Brezinski that because of this definition any sequence transformation can be viewed as an extrapolation method.)
For some methods this relation may be expressed as
\begin{equation}
R(S_n, \dots, S_{n+q}, S) =0 \iff S = T_n = \sum_{i=0}^q a_i S_{n+i}.
\end{equation}
......@@ -171,4 +172,70 @@ T_n = \frac{S_n S_{n+2} - S_{n+1}^2}{S_{n+2} - 2 S_{n+1} + S_n}.
Note that $dR/dS = -(a_1 + a_2)$.
\end{example}
\section{E-algorithm and derivatives}
For the E-algorithm it is assumed that the relation $R$ has the form
\begin{equation}
S_n - S - \sum_{i=1}^k a_i g_i(n) = 0
\end{equation}
where $g_i(n)$ are some functions that depend on the indices $i$ and $n$ and the elements $\set{S_{n+j}}_{j=0}^k$.
By solving the system created by repeating this equation for indices $n$ through $n+k$, in the same manner as the previous example, one arrives at the solution
\begin{equation}
S = T_k^{(n)} = \frac{\vmat{S_n & \dots & S_{n+k} \\ g_1(n) & \dots & g_1(n+k) \\ \vdots & & \vdots \\ g_k(n) & \dots & g_k(n+k)}}{\vmat{1 & \dots & 1 \\ g_1(n) & \dots & g_1(n+k) \\ \vdots & & \vdots \\ g_k(n) & \dots & g_k(n+k)}} .
\end{equation}
The kernel of this transformation is
\begin{equation}
S_n = S + \sum_{i=1}^k a_i g_i(n)
\end{equation}
which may be readily obtained by rearranging the relation $R$.
\section{Summary of Sidi proof of MPE=Arnoldi}
Let $(x_n)$ be a sequence to be accelerated, then we produce approximations
\begin{equation}
T_n^{(k)} = \sum_{j=0}^k \gamma_j^{(n,k)} x_{n+j}
\end{equation}
where
\begin{equation*}
\sum_{j=0}^k \gamma_j^{(n,k)} = 1 .
\end{equation*}
The $\gamma_j^{(n,k)}$ also satisfy
\begin{equation}
\sum_{j=0}^k \langle \Delta x_{n+i}, \Delta x_{n+j} \rangle \gamma_j^{(n,k)} = 0, \quad 0 \leq i \leq k-1
\end{equation}
where $\langle \cdot, \cdot \rangle$ is the $\ell^2$ inner product.
The $\Delta$ operator has been defined previously but we reiterate it here:
\begin{equation*}
\Delta x_{n+i} = x_{n+i+1} - x_{n+i}.
\end{equation*}
We've seen previously that $T_n^{(k)}$ can be written as
\begin{equation*}
T_n^{(k)} = \frac{ \vmat{ x_n & \dots & x_{n+k} \\ \langle \Delta x_n, \Delta x_n \rangle & \dots & \langle \Delta x_n, \Delta x_{n+k} \rangle \\ \vdots & & \vdots \\ \langle \Delta x_{n+k-1}, \Delta x_n \rangle & \dots & \langle \Delta x_{n+k-1}, \Delta x_{n+k} \rangle }}{ \vmat{ 1 & \dots & 1 \\ \langle \Delta x_n, \Delta x_n \rangle & \dots & \langle \Delta x_n, \Delta x_{n+k} \rangle \\ \vdots & & \vdots \\ \langle \Delta x_{n+k-1}, \Delta x_n \rangle & \dots & \langle \Delta x_{n+k-1}, \Delta x_{n+k} \rangle}} .
\end{equation*}
The notion of the determinant is generalized here to allow for a vector result.
That is, we expand along the first row so that each $x_{n+i}$ is multiplied by the determinant of a submatrix.
That means each $\gamma_j^{(n,k)}$ is equal to
\begin{equation}
\gamma_j^{(n,k)} = (-1)^j \frac{\vmat{\langle \Delta x_n, \Delta x_n \rangle & \dots & \langle \Delta x_n, \Delta x_{n+j-1} \rangle & \langle \Delta x_n, \Delta x_{n+j+1} \rangle & \dots & \langle \Delta x_n, \Delta x_{n+k} \rangle \\
\vdots & & \vdots & \vdots & & \vdots \\
\langle \Delta x_{n+k-1}, \Delta x_n \rangle & \dots & \langle \Delta x_{n+k-1}, \Delta x_{n+j-1} \rangle & \langle \Delta x_{n+k-1}, \Delta x_{n+j+1} \rangle & \dots & \langle \Delta x_{n+k-1}, \Delta x_{n+k} \rangle
}}{
\vmat{ 1 & \dots & 1 \\
\langle \Delta x_n, \Delta x_n \rangle & \dots & \langle \Delta x_n, \Delta x_{n+k} \rangle \\
\vdots & & \vdots \\
\langle \Delta x_{n+k-1}, \Delta x_n \rangle & \dots & \langle \Delta x_{n+k-1}, \Delta x_{n+k} \rangle}
} ,
\end{equation}
where the sign $(-1)^j$ arises from the cofactor expansion of the numerator determinant along its first row.
Suppose the sequence $(x_n)$ is defined as $x_{n+1} = A x_n + b$ for some matrix $A$ and vector $b$.
The residual $r(x) = Ax + b - x$ satisfies $r(x_i) = \Delta x_i$ and $r(s)=0$ where $s$ is the limit of the sequence, $s=As+b$.
Moreover,
\begin{equation}
\Delta x_{i+1} = A x_{i+1} + b - x_{i+1} = A x_{i+1} + b - A x_i - b = A \Delta x_i = A^{i+1} \Delta x_0.
\end{equation}
Thus,
\begin{equation}
r(T_n^{(k)}) = \sum_{i=0}^k \gamma_i^{(n,k)} \Delta x_{n+i} \in \Span \set{\Delta x_n, A \Delta x_n, \dots , A^k \Delta x_n}.
\end{equation}
\end{document}
\ No newline at end of file
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment