\section{The Adjoint of a Linear Operator} \begin{definition}\label{Definition 6.16} \hfill\\ For a linear operator $T$ on an inner product space $V$, we define a related linear operator on $V$ called the \textbf{adjoint} of $T$, whose matrix representation with respect to any orthonormal basis $\beta$ for $V$ is $[T]_\beta^*$. \end{definition} \begin{theorem} \hfill\\ Let $V$ be a finite-dimensional inner product space over $\F$, and let $\mathsf{g}: V \to \F$ be a linear transformation. Then there exists a unique vector $y \in V$ such that $\mathsf{g}(x) = \lr{x,y}$ for all $x \in V$. \end{theorem} \begin{theorem} \hfill\\ Let $V$ be a finite-dimensional inner product space, and let $T$ be a linear operator on $V$. Then there exists a unique function $T^*: V \to V$ such that $\lr{T(x), y} = \lr{x, T^*(y)}$ for all $x,y \in V$. Furthermore, $T^*$ is linear. \end{theorem} \begin{remark} \hfill\\ \textbf{Important Note:} For the remainder of this chapter we adopt the convention that a reference to the adjoint of a linear operator on an infinite-dimensional inner product space assumes its existence. \end{remark} \begin{theorem} \hfill\\ Let $V$ be a finite-dimensional inner product space, and let $\beta$ be an orthonormal basis for $V$. If $T$ is a linear operator on $V$, then \[[T^*]_\beta = [T]_\beta^*.\] \end{theorem} \begin{corollary} \hfill\\ Let $A$ be an $n \times n$ matrix. Then $L_{A^*} = (L_A)^*$. \end{corollary} \begin{theorem} \hfill\\ Let $V$ be an inner product space, and let $T$ and $U$ be linear operators on $V$. Then \begin{enumerate} \item $(T+U)^* = T^* + U^*$; \item $(cT)^* = \overline{c}T^*$ for any $c \in \F$; \item $(TU)^* = U^*T^*$; \item $T^{**} = T$; \item $I^* = I$. \end{enumerate} \end{theorem} \begin{corollary} \hfill\\ Let $A$ and $B$ be $n \times n$ matrices. Then \begin{enumerate} \item $(A + B)^* = A^* + B^*$; \item $(cA)^* = \overline{c}A^*$ for all $c \in \F$; \item $(AB)^* = B^*A^*$; \item $A^{**} = A$; \item $I^* = I$. 
\end{enumerate} \end{corollary} \begin{lemma} \hfill\\ Let $A \in M_{m \times n}(\F), x \in \F^n$, and $y \in \F^m$. Then \[\lr{Ax,y}_m = \lr{x,A^*y}_n.\] \end{lemma} \begin{lemma} \hfill\\ Let $A \in M_{m \times n}(\F)$. Then $\rank{A^*A} = \rank{A}$. \end{lemma} \begin{corollary} \hfill\\ If $A$ is an $m \times n$ matrix such that $\rank{A} = n$, then $A^*A$ is invertible. \end{corollary} \begin{theorem} \hfill\\ Let $A \in M_{m \times n}(\F)$ and $y \in \F^m$. Then there exists $x_0 \in \F^n$ such that $(A^*A)x_0 = A^*y$ and $||Ax_0 - y|| \leq ||Ax - y||$ for all $x \in \F^n$. Furthermore, if $\rank{A} = n$, then $x_0 = (A^*A)^{-1}A^*y$. \end{theorem} \begin{definition} \hfill\\ A solution $s$ to a system of linear equations $Ax = b$ is called a \textbf{minimal solution} if $||s|| \leq ||u||$ for all other solutions $u$. \end{definition} \begin{theorem} \hfill\\ Let $A \in M_{m \times n}(\F)$ and $b \in \F^m$. Suppose that $Ax = b$ is consistent. Then the following statements are true. \begin{enumerate} \item There exists exactly one minimal solution $s$ of $Ax = b$, and $s \in \range{L_{A^*}}$. \item The vector $s$ is the only solution to $Ax = b$ that lies in $\range{L_{A^*}}$; that is, if $u$ satisfies $(AA^*)u = b$, then $s = A^*u$. \end{enumerate} \end{theorem} \begin{definition} \hfill\\ Let $T: V \to W$ be a linear transformation, where $V$ and $W$ are finite-dimensional inner product spaces with inner products $\lr{\cdot, \cdot}_1$ and $\lr{\cdot, \cdot}_2$, respectively. A function $T^*: W \to V$ is called an \textbf{adjoint} of $T$ if $\lr{T(x),y}_2 = \lr{x,T^*(y)}_1$ for all $x \in V$ and $y \in W$.\\ This is an extension of the definition of the \textit{adjoint} of a linear operator (\autoref{Definition 6.16}). \end{definition} \begin{definition} \hfill\\ In physics, \textit{Hooke's law} states that (within certain limits) there is a linear relationship between the length $x$ of a spring and the force $y$ applied to (or exerted by) the spring. 
That is, $y = cx + d$, where $c$ is called the \textbf{spring constant}. \end{definition}