% !TeX root = ../../script.tex
\documentclass[../../script.tex]{subfiles}
\begin{document}
\section{Hilbert Spaces}

\begin{defi}
A mapping $\innerproduct{\cdot}{\cdot}: X \times X \longrightarrow \field$ with the properties
\begin{enumerate}[(i)]
\item $\innerproduct{x + y}{z} = \innerproduct{x}{z} + \innerproduct{y}{z}$
\item $\innerproduct{\alpha x}{y} = \alpha \innerproduct{x}{y}$
\item $\innerproduct{x}{y} = \conj{\innerproduct{y}{x}}$
\item $\innerproduct{x} \ge 0, \quad \innerproduct{x} = 0 \iff x = 0$
\end{enumerate}
is called an inner product on $X$. A vector space $X$ with an inner product is said to be an inner product space.
\end{defi}

\begin{eg}
Examples of inner product spaces are
\begin{enumerate}[(i)]
\item Euclidean space $\realn^n$
\[ \innerproduct{x}{y} = \xi_1 \eta_1 + \cdots + \xi_n \eta_n \]
\item Unitary space $\cmpln^n$
\[ \innerproduct{x}{y} = \xi_1 \conj{\eta_1} + \cdots + \xi_n \conj{\eta_n} \]
\item Sequence space $l^2 = \set[\sum_{k=1}^{\infty} \abs{\xi_k}^2 < \infty]{x = \anyseqdef[\xi]{\field}}$
\[ \innerproduct{x}{y} = \sum_{k=1}^{\infty} \xi_k \conj{\eta_k} \]
\item Space of square-integrable functions $L^2(A) = \set[\int_A \abs{f(t)}^2 \dd{t} < \infty]{f: A \rightarrow \field}$
\[ \innerproduct{f}{g} = \int_A f(t) \conj{g(t)} \dd{t} \]
\end{enumerate}
\end{eg}

\begin{defi}
Define $\norm{x} = \sqrt{\innerproduct{x}}, ~x \in X$. This $\norm{\cdot}$ is a norm on $X$, so every inner product space is a normed space with the norm induced by the inner product.
\end{defi}

\begin{lem}
The Cauchy-Schwarz inequality holds
\[ \forall x, y \in X: \quad \abs{\innerproduct{x}{y}} \le \norm{x}\norm{y} \]
as well as the triangle inequality
\[ \forall x, y \in X: \quad \norm{x + y} \le \norm{x} + \norm{y} \]
\end{lem}
\begin{proof}
\noproof
\end{proof}

\begin{rem}
Consider the parallelogram equality
\[ \norm{x + y}^2 + \norm{x - y}^2 = 2(\norm{x}^2 + \norm{y}^2) \]
The norm $\norm{x} = \sqrt{\innerproduct{x}}$ satisfies this equality (without proof). This implies that $l^p$ and $L^p(A)$ for $p \ne 2$, as well as $C(A)$, are not inner product spaces, since their norms are not induced by any inner product. This can be shown explicitly for $l^p$. Consider the sequences
\begin{align*}
x &= (1, 1, 0, 0, \cdots) & y &= (1, -1, 0, 0, \cdots)
\end{align*}
Then $\norm{x} = \norm{y} = 2^{\frac{1}{p}}$ and $\norm{x + y} = \norm{x - y} = 2$. Thus the parallelogram equality doesn't hold
\[ \norm{x + y}^2 + \norm{x - y}^2 = 2^2 + 2^2 \ne 2(2^{\frac{2}{p}} + 2^{\frac{2}{p}}) = 2(\norm{x}^2 + \norm{y}^2) \]
unless $p = 2$.
\end{rem}

\begin{lem}
Let $x_n \rightarrow x$ and $y_n \rightarrow y$ in $X$. Then $\innerproduct{x_n}{y_n} \rightarrow \innerproduct{x}{y}$.
\end{lem}
\begin{proof}
\begin{equation}
\begin{split}
\abs{\innerproduct{x_n}{y_n} - \innerproduct{x}{y}} &= \abs{(\innerproduct{x_n}{y_n} - \innerproduct{x_n}{y}) + (\innerproduct{x_n}{y} - \innerproduct{x}{y})} \\
&\le \abs{\innerproduct{x_n}{y_n} - \innerproduct{x_n}{y}} + \abs{\innerproduct{x_n}{y} - \innerproduct{x}{y}} \\
&= \abs{\innerproduct{x_n}{y_n - y}} + \abs{\innerproduct{x_n - x}{y}} \\
&\le \norm{x_n} \norm{y_n - y} + \norm{x_n - x}\norm{y} \conv{} 0
\end{split}
\end{equation}
where the last step uses that the convergent sequence $(x_n)$ is bounded.
\end{proof}

\begin{defi}
An inner product space $X$ that is complete in the norm generated by the inner product is said to be a Hilbert space. A Hilbert space is a Banach space.
A subspace $Y$ of an inner product space $X$ is defined to be a vector subspace of $X$, with the inner product restricted to $Y \times Y$.
\end{defi}
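\begin{eg}
The spaces $\realn^n$, $\cmpln^n$, $l^2$ and $L^2(A)$ from the examples above are complete with respect to the induced norm (without proof) and are therefore Hilbert spaces. In contrast, the space $C([0, 1])$ of continuous functions equipped with the inner product
\[ \innerproduct{f}{g} = \int_0^1 f(t) \conj{g(t)} \dd{t} \]
is an inner product space that is not complete, hence not a Hilbert space: a sequence of continuous functions approximating a step function is Cauchy in this norm but has no continuous limit.
\end{eg}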
\begin{thm}
Let $Y$ be a subspace of a Hilbert space $H$. Then
\begin{enumerate}[(i)]
\item $Y$ is complete $\iff$ $Y$ is closed in $H$
\item $Y$ is finite-dimensional $\implies$ $Y$ is complete
\item $H$ is separable $\implies$ $Y$ is separable
\end{enumerate}
(A set $X$ is separable if there exists a countable $M \subset X$ such that $M$ is dense in $X$)
\end{thm}
\begin{proof}
\noproof
\end{proof}

\begin{defi}
An element $x \in X$ is said to be orthogonal to an element $y \in X$ if $\innerproduct{x}{y} = 0$. One also says that $x$ and $y$ are orthogonal in that case, and it is denoted as $x \perp y$. Similarly, let $A, B \subset X$. Then
\begin{align*}
x \perp A &\iff \forall a \in A: \quad x \perp a \\
A \perp B &\iff \forall a \in A ~\forall b \in B: \quad a \perp b
\end{align*}
Let $M$ be a non-empty subset of $X$ and $x \in X$. Then the distance between $x$ and $M$ is defined as
\[ \delta = \inf_{y \in M} \norm{x - y} \]
A subset $M \subset X$ is said to be convex if
\[ \forall x, y \in M ~\forall \alpha \in [0, 1]: \quad (\alpha x + (1 - \alpha) y) \in M \]
\end{defi}

\begin{thm}\label{thm:17.11}
Let $X$ be an inner product space and $M$ a non-empty, complete, convex subset of $X$. Then for every $x \in X$ there exists a unique $y \in M$ such that
\[ \delta = \inf_{\tilde{y} \in M} \norm{x - \tilde{y}} = \norm{x - y} \]
\end{thm}
\begin{hproof}
Consider a sequence $\anyseqdef[y]{M}$ such that $\delta_n = \norm{x - y_n} \conv{n \rightarrow \infty} \delta$. If we can show that this is a Cauchy sequence, then by the completeness of $M$ it converges to some $y \in M$, and $\norm{x - y} = \lim_{n \rightarrow \infty} \norm{x - y_n} = \delta$.
\end{hproof}

\begin{cor}\label{cor:17.12}
If $M = Y$, where $Y$ is a complete subspace of $X$ and $x \in X$ is fixed, then $z = x - y$, with $y \in Y$ the minimizer from \Cref{thm:17.11}, is orthogonal to $Y$.
\end{cor}

\begin{defi}
Let $H$ be a Hilbert space and $Y$ a closed subspace of $H$. Then the set
\[ Y^{\perp} = \set[z \perp Y]{z \in H} \]
is the orthogonal complement of $Y$, which is a vector subspace of $H$.
\end{defi}

\begin{thm}
Let $Y$ be a complete subspace of $X$. Then
\[ \forall x \in X ~\exists! y \in Y, z \in Y^{\perp}: \quad x = y + z \]
\end{thm}
\begin{proof}
The existence of $y$ and $z$ is ensured by \Cref{thm:17.11} and \Cref{cor:17.12}, if we choose a $y \in Y$ such that
\begin{equation}
\inf_{\tilde{y} \in Y} \norm{x - \tilde{y}} = \norm{x - y}
\end{equation}
and $z = x - y$. Then $z \in Y^{\perp}$, so
\begin{equation}
x = y + x - y = y + z
\end{equation}
To show that $y$ and $z$ are unique, assume that $x = y + z = y_1 + z_1$ with $y, y_1 \in Y$ and $z, z_1 \in Y^{\perp}$. Then $Y \ni y - y_1 = z_1 - z \in Y^{\perp}$ and
\begin{equation}
\innerproduct{y - y_1}{z_1 - z} = \innerproduct{y - y_1}{y - y_1} = 0
\end{equation}
since $Y \perp Y^{\perp}$. Hence $\norm{y - y_1} = 0$, which implies $y_1 = y$ and therefore also $z_1 = z$.
\end{proof}

\begin{defi}
A vector space $X$ is said to be the direct sum of two subspaces $Y$ and $Z$ of $X$ if
\[ \forall x \in X ~\exists! y \in Y, z \in Z: \quad x = y + z \]
This is denoted by $X = Y \oplus Z$.
\end{defi}

\begin{rem}
Let $Y$ be a closed subspace of a Hilbert space $H$ (so that $Y$ is complete). Then $H = Y \oplus Y^{\perp}$.
\end{rem}
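\begin{eg}
As a simple illustration of this decomposition, let $H = \realn^3$ and $Y = \spn\set{(1, 0, 0), (0, 1, 0)}$. Then $Y^{\perp} = \spn\set{(0, 0, 1)}$, and every $x = (\xi_1, \xi_2, \xi_3)$ splits uniquely as
\[ x = \underbrace{(\xi_1, \xi_2, 0)}_{\in Y} + \underbrace{(0, 0, \xi_3)}_{\in Y^{\perp}} \]
Here $y = (\xi_1, \xi_2, 0)$ is exactly the element of $Y$ closest to $x$, in accordance with \Cref{thm:17.11}.
\end{eg}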
\begin{defi}
An orthogonal set $M$ in $X$ is a subset of $X$ whose elements are pairwise orthogonal
\[ \forall x, y \in M, ~x \ne y: \quad \innerproduct{x}{y} = 0 \]
An orthogonal set $M$ is said to be orthonormal if
\[ \forall x, y \in M: \quad \innerproduct{x}{y} = \begin{cases} 1, & x = y \\ 0, & x \ne y \end{cases} \]
\end{defi}

\begin{eg}
\begin{enumerate}[(i)]
\item The sets
\begin{align*}
M &= \set{(1, 0, 0), (0, 1, 0), (0, 0, 1)} \\
M &= \set{\left(\rec{\sqrt{2}}, \rec{\sqrt{2}}, 0 \right), \left(\rec{\sqrt{2}}, -\rec{\sqrt{2}}, 0 \right), (0, 0, 1)}
\end{align*}
are orthonormal in $X = \realn^3$
\item Let $X = l^2$. Then the set $M = \set[n > 0]{e_n}$ (with $e_1 = (1, 0, 0, \cdots), e_2 = (0, 1, 0, \cdots)$ and so on) is an orthonormal set
\item Let $X = L^2([0, 2\pi])$. Then the sets $M = \set[n \ge 0]{e_n}$ with
\[ e_0(t) = \rec{\sqrt{2\pi}}, \quad e_n(t) = \frac{\cos nt}{\sqrt{\pi}}, \quad n \ge 1 \]
and $M = \set[n > 0]{e_n}$ with
\[ e_n(t) = \frac{\sin nt}{\sqrt{\pi}} \]
are orthonormal sets
\end{enumerate}
\end{eg}

\begin{rem}
Let $M = \set{e_1, \cdots, e_n}$ be a basis in $X$. Then
\[ \forall x \in X ~\exists! \alpha_1, \cdots, \alpha_n: \quad x = \alpha_1 e_1 + \cdots + \alpha_n e_n \]
If $M$ is orthonormal, i.e. $\innerproduct{e_k}{e_l} = \delta_{kl}$, then
\begin{align*}
\innerproduct{x}{e_k} &= \innerproduct{\alpha_1 e_1 + \cdots + \alpha_k e_k + \cdots + \alpha_n e_n}{e_k} \\
&= \alpha_1 \innerproduct{e_1}{e_k} + \cdots + \alpha_k \innerproduct{e_k}{e_k} + \cdots + \alpha_n \innerproduct{e_n}{e_k} = \alpha_k
\end{align*}
\end{rem}

\begin{rem}
The idea of the previous remark can be extended to infinite-dimensional inner product spaces. Let $\set{e_1, \cdots, e_n}$ be an orthonormal set in an infinite-dimensional space $X$. With some $x \in X$, consider
\[ y := \sum_{k=1}^n \innerproduct{x}{e_k} e_k, \quad z := x - y \]
Using the Pythagorean theorem for pairwise orthogonal vectors we get
\begin{align*}
\innerproduct{z}{y} &= \innerproduct{x - y}{y} = \innerproduct{x}{y} - \innerproduct{y}{y} = \innerproduct{x}{\sum_{k=1}^n \innerproduct{x}{e_k} e_k} - \norm{\sum_{k=1}^n \innerproduct{x}{e_k} e_k}^2 \\
&= \sum_{k=1}^n \conj{\innerproduct{x}{e_k}} \innerproduct{x}{e_k} - \sum_{k=1}^n \norm{\innerproduct{x}{e_k} e_k}^2 = \sum_{k=1}^n \abs{\innerproduct{x}{e_k}}^2 - \sum_{k=1}^n \abs{\innerproduct{x}{e_k}}^2 \norm{e_k}^2 = 0
\end{align*}
so $z \perp y$. Again by the Pythagorean theorem we obtain
\[ \norm{x}^2 = \norm{y}^2 + \norm{z}^2 \ge \norm{y}^2 = \sum_{k=1}^n \abs{\innerproduct{x}{e_k}}^2 \]
\end{rem}

\begin{thm}[Bessel Inequality]
Let $\set[k > 0]{e_k}$ be an orthonormal sequence in an inner product space $X$. Then
\[ \forall x \in X: \quad \sum_{k=1}^{\infty} \abs{\innerproduct{x}{e_k}}^2 \le \norm{x}^2 \]
\end{thm}

\begin{rem}
Let $\set[n > 0]{x_n}$ be linearly independent. We want to construct an orthonormal set $\set[n > 0]{e_n}$ with the property
\[ \forall n > 0: \quad \spn\set{x_1, \cdots, x_n} = \spn\set{e_1, \cdots, e_n} \]
This can be achieved using the Gram-Schmidt procedure:
\[ e_1 := \frac{x_1}{\norm{x_1}} \]
\[ v_2 := x_2 - \innerproduct{x_2}{e_1} e_1, \quad e_2 := \frac{v_2}{\norm{v_2}} \]
and in general
\[ v_n := x_n - \sum_{k=1}^{n-1} \innerproduct{x_n}{e_k} e_k, \quad e_n := \frac{v_n}{\norm{v_n}} \]
\end{rem}
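\begin{eg}
As a small worked example of the Gram-Schmidt procedure, take $X = \realn^2$ with $x_1 = (1, 1)$ and $x_2 = (1, 0)$. Then
\[ e_1 = \frac{x_1}{\norm{x_1}} = \left(\rec{\sqrt{2}}, \rec{\sqrt{2}}\right) \]
and
\[ v_2 = x_2 - \innerproduct{x_2}{e_1} e_1 = (1, 0) - \rec{\sqrt{2}} \left(\rec{\sqrt{2}}, \rec{\sqrt{2}}\right) = \left(\rec{2}, -\rec{2}\right), \quad e_2 = \frac{v_2}{\norm{v_2}} = \left(\rec{\sqrt{2}}, -\rec{\sqrt{2}}\right) \]
so $\set{e_1, e_2}$ is orthonormal and $\spn\set{x_1, x_2} = \spn\set{e_1, e_2} = \realn^2$.
\end{eg}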
\begin{thm}
Let $\set[k > 0]{e_k}$ be an orthonormal set in a Hilbert space $H$. Then the series
\[ \sum_{k=1}^{\infty} \alpha_k e_k, \quad \alpha_k \in \field \]
converges in $H$ if and only if
\[ \sum_{k=1}^{\infty} \abs{\alpha_k}^2 < \infty \]
If the series converges and
\[ x := \sum_{k=1}^{\infty} \alpha_k e_k \]
then $\alpha_k = \innerproduct{x}{e_k}$. For every $x \in H$ the series
\[ \sum_{k=1}^{\infty} \innerproduct{x}{e_k} e_k \]
converges, but not necessarily to $x$.
\end{thm}
\begin{proof}
Proving that the series in question converges if and only if $\sum_{k=1}^{\infty} \abs{\alpha_k}^2$ converges is equivalent to proving that $S_n = \alpha_1 e_1 + \cdots + \alpha_n e_n$ is a Cauchy sequence if and only if $R_n = \abs{\alpha_1}^2 + \cdots + \abs{\alpha_n}^2$ is a Cauchy sequence. We can compute for $n < m$
\begin{equation}
\norm{S_m - S_n}^2 = \norm{\alpha_{n+1} e_{n+1} + \cdots + \alpha_m e_m}^2 = \abs{\alpha_{n+1}}^2 + \cdots + \abs{\alpha_m}^2 = R_m - R_n
\end{equation}
This proves that $(S_n)$ is a Cauchy sequence in $H$ if and only if $(R_n)$ is a Cauchy sequence in $\realn$. Now we want to prove the second statement. For this, let $x = \sum_{k=1}^{\infty} \alpha_k e_k$. We can compute for $k \le n$ that $\innerproduct{S_n}{e_k} = \alpha_k$. Since $S_n \conv{n \rightarrow \infty} x$, the continuity of the inner product implies
\begin{equation}
\alpha_k = \innerproduct{S_n}{e_k} \conv{n \rightarrow \infty} \innerproduct{x}{e_k}
\end{equation}
The final statement follows from the Bessel inequality together with the first part of the theorem:
\begin{equation}
\sum_{k=1}^{\infty} \abs{\innerproduct{x}{e_k}}^2 \le \norm{x}^2 < \infty \implies \sum_{k=1}^{\infty} \innerproduct{x}{e_k} e_k \text{ converges in } H
\end{equation}
\end{proof}

\begin{defi}[Total Orthonormal Sets]
An orthonormal set $M \subset X$ is said to be total if $\closure{\spn{M}} = X$, or in other words, if $\spn{M}$ is dense in $X$. A total orthonormal family in $X$ is called an orthonormal basis.
\end{defi}

\begin{thm}
In every Hilbert space $H$ there exists a total orthonormal set.
\end{thm}
\begin{proof}
\noproof
\end{proof}

\begin{thm}[Parseval Equality]
Let $M = \set[k > 0]{e_k}$ be an orthonormal set in a Hilbert space $H$. Then $M$ is total in $H$ if and only if
\[ \forall x \in H: \quad \sum_k \abs{\innerproduct{x}{e_k}}^2 = \norm{x}^2 \]
\end{thm}
\begin{proof}
\noproof
\end{proof}

\begin{thm}
Let $H$ be a Hilbert space. Then
\begin{enumerate}[(i)]
\item If $H$ is separable, then every orthonormal set in $H$ is countable
\item If $H$ contains a total orthonormal sequence, then $H$ is separable
\end{enumerate}
\end{thm}

\begin{eg}[Examples of Orthonormal bases]
\begin{enumerate}[(i)]
\item Legendre Polynomials

Consider the space $L^2([-1, 1])$, which is separable and is the space of all real-valued functions $x$ with the domain $[-1, 1]$, such that
\[ \int_{-1}^1 \abs{x(t)}^2 \dd{t} < \infty \]
We want to find an orthonormal basis of functions for this space. For that we will consider the linearly independent set of polynomials $M = \set[n \ge 0]{x_n}$, where $x_n(t) = t^n, ~t \in [-1, 1]$. Then $\closure{\spn{M}} = L^2([-1, 1])$, so $M$ is a total set. However it is not orthonormal because
\[ \innerproduct{x_k}{x_l} = \int_{-1}^1 t^k t^l \dd{t} = \int_{-1}^1 t^{k+l} \dd{t} \ne 0 \]
if $k + l$ is even. However we can use the Gram-Schmidt procedure to find an orthonormal set with the same span:
\[ e_n(t) = \sqrt{\frac{2n + 1}{2}} P_n(t), \quad P_n(t) = \rec{2^n n!} \dv[n]{t} (t^2 - 1)^n \]
These $P_n(t)$ are called the (unassociated) Legendre polynomials.
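Explicitly, the first few Legendre polynomials are
\[ P_0(t) = 1, \quad P_1(t) = t, \quad P_2(t) = \rec{2}(3t^2 - 1), \quad P_3(t) = \rec{2}(5t^3 - 3t) \]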
The set $\set[n \ge 0]{e_n}$ constructed in this way is an orthonormal basis in $L^2([-1, 1])$:
\[ x = \sum_{n=0}^{\infty} \innerproduct{x}{e_n} e_n, \quad \forall x \in L^2([-1, 1]) \]
\item Hermite Polynomials

Consider $L^2(\realn)$. We can see that $t^n \not\in L^2(\realn)$ because
\[ \int_{\realn} \abs{t^n}^2 \dd{t} = \infty \]
Instead, consider $M = \set[n \ge 0]{x_n}$ with
\[ x_n(t) = t^n e^{-\frac{t^2}{2}}, \quad t \in \realn \]
After orthonormalizing these functions (for example with the Gram-Schmidt procedure) we find
\[ e_n(t) = \rec{\sqrt{2^n n! \sqrt{\pi}}} e^{-\frac{t^2}{2}} H_n(t), \quad H_n(t) = (-1)^n e^{t^2} \dv[n]{t} e^{-t^2} \]
where $H_n(t)$ are the Hermite polynomials. The set $\set[n \ge 0]{e_n}$ is an orthonormal basis in $L^2(\realn)$.
\item Laguerre Polynomials

Consider $L^2([0, \infty))$ and $M = \set[n \ge 0]{x_n}$ with
\[ x_n(t) = t^n e^{-\frac{t}{2}}, \quad t \ge 0 \]
Then we can find
\[ e_n(t) = e^{-\frac{t}{2}} L_n(t), \quad L_n(t) = \frac{e^t}{n!} \dv[n]{t} (t^n e^{-t}) \]
where $L_n(t)$ are called the Laguerre polynomials. The set $\set[n \ge 0]{e_n}$ is an orthonormal basis in $L^2([0, \infty))$.
\end{enumerate}
\end{eg}
\end{document}