Created the Real Analysis Theorems and Definitions packet

This commit is contained in:
2024-01-09 13:01:34 -07:00
commit d19e454f27
65 changed files with 3114 additions and 0 deletions
+30
View File
@@ -0,0 +1,30 @@
\section{Absolute Convergence}
\begin{definition}
Let $X:=(x_n)$ be a sequence in $\R$. We say that the series $\sum x_n$ is \textbf{absolutely convergent} if the series $\sum |x_n|$ is convergent in $\R$. A series is said to be \textbf{conditionally} (or \textbf{nonabsolutely}) \textbf{convergent} if it is convergent, but it is not absolutely convergent.
\end{definition}
\begin{theorem}
If a series in $\R$ is absolutely convergent, then it is convergent.
\end{theorem}
\begin{theorem}
If a series $\sum x_n$ is convergent, then any series obtained from it by grouping the terms is also convergent and to the same value.
\end{theorem}
\begin{definition}
A series $\sum y_k$ in $\R$ is a \textbf{rearrangement} of a series $\sum x_n$ if there is a bijection $f$ of $\N$ onto $\N$ such that $y_k=x_{f(k)}$ for all $k \in \N$.
\end{definition}
\begin{theorem}[\textbf{Rearrangement Theorem}]
Let $\sum x_n$ be an absolutely convergent series in $\R$. Then any rearrangement $\sum y_k$ of $\sum x_n$ converges to the same value.
\end{theorem}
\begin{theorem}
If $\sum a_n$ is conditionally convergent, then there exist rearrangements of $\sum a_n$ achieving any of the following:
\begin{enumerate}
\item The rearrangement converges to any real number $\alpha$
\item The rearrangement diverges to $\pm \infty$
\item The rearrangement oscillates between any two real numbers.
\end{enumerate}
\end{theorem}
+5
View File
@@ -0,0 +1,5 @@
\chapter{Infinite Series}
\subimport{./}{absolute-convergence.tex}
\subimport{./}{tests-for-absolute-convergence.tex}
\subimport{./}{tests-for-nonabsolute-convergence.tex}
\subimport{./}{series-of-functions.tex}
+100
View File
@@ -0,0 +1,100 @@
\section{Series of Functions}
\begin{definition}
If $(f_n)$ is a sequence of functions defined on a subset $D$ of $\R$ with values in $\R$, the sequence of \textbf{partial sums} $(s_n)$ of the infinite series $\sum f_n$ is defined for $x$ in $D$ by
\begin{align*}
s_1(x) & :=f_1(x) \\
s_2(x) & :=s_1(x)+f_2(x) \\
\dots\dots\dots & \dots\dots\dots\dots\dots\dots \\
s_{n+1}(x) & :=s_n(x)+f_{n+1}(x) \\
\dots\dots\dots & \dots\dots\dots\dots\dots\dots
\end{align*}
In case the sequence $(s_n)$ of functions converges on $D$ to a function $f$, we say that the infinite series of functions $\sum f_n$ \textbf{converges} to $f$ on $D$. We will often write
\[\sum f_n\ \ \text{or}\ \ \sum\limits_{n=1}^{\infty}f_n\]
to denote either the series or the limit function, when it exists.
\\\\If the series $\sum |f_n(x)|$ converges for each $x$ in $D$, we say that $\sum f_n$ is \textbf{absolutely convergent} on $D$. If the sequence $(s_n)$ of partial sums is uniformly convergent on $D$ to $f$, we say that $\sum f_n$ is \textbf{uniformly convergent} on $D$, or that it \textbf{converges to $f$ uniformly on $D$}.
\end{definition}
\begin{theorem}
If $f_n$ is continuous on $D \subseteq \R$ to $\R$ for each $n \in \N$ and if $\sum f_n$ converges to $f$ uniformly on $D$, then $f$ is continuous on $D$.
\end{theorem}
\begin{theorem}
Suppose that the real-valued functions $f_n,\ n \in \N$ are Riemann integrable on the interval $J:=[a,b]$. If the series $\sum f_n$ converges to $f$ uniformly on $J$, then $f$ is Riemann integrable and
\[\displaystyle\int_{a}^{b}f=\sum\limits_{n=1}^{\infty}\displaystyle\int_{a}^{b}f_n\]
\end{theorem}
\begin{theorem}
For each $n \in \N$, let $f_n$ be a real-valued function on $J:=[a,b]$ that has a derivative $f'_n$ on $J$. Suppose that the series $\sum f_n$ converges for at least one point of $J$ and that the series of derivatives $\sum f'_n$ converges uniformly on $J$.
\\Then there exists a real-valued function $f$ on $J$ such that $\sum f_n$ converges uniformly on $J$ to $f$. In addition, $f$ has a derivative on $J$ and $f'=\sum f'_n$.
\end{theorem}
\begin{theorem}[\textbf{Cauchy Criterion}]
Let $(f_n)$ be a sequence of functions on $D\subseteq \R$ to $\R$. The series $\sum f_n$ is uniformly convergent on $D$ if and only if for every $\varepsilon >0$ there exists an $M(\varepsilon)$ such that if $m>n\geq M(\varepsilon)$, then
\[|f_{n+1}(x)+\dots+f_m(x)|<\varepsilon\ \forall\ x \in D\]
\end{theorem}
\begin{theorem}[\textbf{Weierstrass M-Test}]
Let $(M_n)$ be a sequence of positive real numbers such that $|f_n(x)|\leq M_n$ for $x \in D$, $n \in \N$. If the series $\sum M_n$ is convergent, then $\sum f_n$ is uniformly convergent on $D$, $\sum |f_n|$ is uniformly convergent on $D$, and $\sum f_n$ is absolutely convergent on $D$.
\end{theorem}
\begin{definition}
A series of real functions $\sum f_n$ is said to be a \textbf{power series around $x=c$} if the function $f_n$ has the form
\[f_n(x)=a_n(x-c)^n\]
where $a_n$ and $c$ belong to $\R$ and where $n=0,1,2,\dots$.
\end{definition}
\begin{definition}
Let $\sum a_nx^n$ be a power series. If the sequence $(|a_n|^{1/n})$ is bounded, we set $\rho := \limsup(|a_n|^{1/n})$; if this sequence is not bounded we set $\rho=+\infty$. We define the \textbf{radius of convergence} of $\sum a_nx^n$ to be given by
\[R:=\begin{cases}
0 & \text{if } \rho = +\infty \\
1/\rho & \text{if } 0<\rho<+\infty \\
+\infty & \text{if } \rho=0
\end{cases}\]
The \textbf{interval of convergence} is the open interval $(-R, R)$.
\end{definition}
\begin{theorem}[\textbf{Cauchy-Hadamard Theorem}]
If $R$ is the radius of convergence of the power series $\sum a_nx^n$, then the series is absolutely convergent if $|x|<R$ and is divergent if $|x|>R$.
\end{theorem}
\begin{remark}
It will be noted that the Cauchy-Hadamard Theorem makes no statement as to whether the power series converges when $|x|=R$. Indeed, anything can happen, as the examples
\[\sum x^n,\ \sum \frac{1}{n}x^n,\ \sum \frac{1}{n^2}x^n\]
show. Since $\lim(n^{1/n})=1$, each of these power series has a radius of convergence equal to 1. The first power series converges at neither of the points $x=-1$ and $x=+1$; the second series converges at $x=-1$ but diverges at $x=+1$; and the third power series converges at both $x=-1$ and $x=+1$. (Find a power series with $R=1$ that converges at $x=+1$ but diverges at $x=-1$.)
\end{remark}
\begin{theorem}
Let $R$ be the radius of convergence of $\sum a_nx^n$ and let $K$ be a closed and bounded interval contained in the interval of convergence $(-R, R)$. Then the power series converges uniformly on $K$.
\end{theorem}
\begin{theorem}
Let $\sum a_n(x-c)^n$ be a power series. Then either
\begin{enumerate}
\item The series is absolutely convergent on $\R$
\item The series converges only at one point, $x=c$
\item There exists $R \in \R$ such that $\sum a_n(x-c)^n$ is absolutely convergent for all $|x-c|<R$, and is divergent for all $|x-c|>R$. (Note that the endpoints must be tested separately.)
\end{enumerate}
\end{theorem}
\begin{theorem}
The limit of a power series is continuous on the interval of convergence. A power series can be integrated term-by-term over any closed and bounded interval contained in the interval of convergence.
\end{theorem}
\begin{theorem}[\textbf{Differentiation Theorem}]
A power series can be differentiated term-by-term within the interval of convergence. In fact, if
\[f(x)=\sum\limits_{n=0}^{\infty}a_nx^n,\ \ \text{ then }\ \ f'(x)=\sum\limits_{n=1}^{\infty}na_nx^{n-1}\ \ \text{ for }\ \ |x|<R.\]
Both series have the same radius of convergence.
\end{theorem}
\begin{remark}
It is to be observed that the theorem makes no assertion about the endpoints of the interval of convergence. If a series is convergent at an endpoint, then the differentiated series may or may not be convergent at this point.
\end{remark}
\begin{theorem}[\textbf{Uniqueness Theorem}]
If $\sum a_nx^n$ and $\sum b_nx^n$ converge on some interval $(-r,r)$, $r>0$, to the same function $f$, then
\[a_n=b_n\ \forall\ n \in \N\]
\end{theorem}
The Taylor Series is
\[f(x)=\sum\limits_{n=0}^{\infty}\frac{f^{(n)}(c)}{n!}(x-c)^n\]
@@ -0,0 +1,72 @@
\section{Tests for Absolute Convergence}
\begin{theorem}[\textbf{Limit Comparison Test, II}]
Suppose that $X:=(x_n)$ and $Y:=(y_n)$ are nonzero real sequences and suppose that the following limit exists in $\R$:
\[r:=\lim\abs{\frac{x_n}{y_n}}\]
\begin{enumerate}
\item If $r \neq 0$, then $\sum x_n$ is absolutely convergent if and only if $\sum y_n$ is absolutely convergent.
\item If $r=0$ and if $\sum y_n$ is absolutely convergent, then $\sum x_n$ is absolutely convergent.
\end{enumerate}
\end{theorem}
\begin{theorem}[\textbf{Root Test}]
Let $X:=(x_n)$ be a sequence in $\R$.
\begin{enumerate}
\item If there exist $r \in \R$ with $r<1$ and $K \in \N$ such that
\[|x_n|^{1/n}\leq r\ \ \text{for}\ \ n \geq K,\]
then the series $\sum x_n$ is absolutely convergent.
\item If there exists $K \in \N$ such that
\[|x_n|^{1/n} \geq 1\ \ \text{for}\ \ n \geq K,\]
then the series $\sum x_n$ is divergent.
\end{enumerate}
\end{theorem}
\begin{corollary}
Let $X:=(x_n)$ be a sequence in $\R$ and suppose that the limit
\[r:=\lim |x_n|^{1/n}\]
exists in $\R$. Then $\sum x_n$ is absolutely convergent when $r < 1$ and is divergent when $r > 1$.
\end{corollary}
\begin{theorem}[\textbf{Ratio Test}]
Let $X := (x_n)$ be a sequence of nonzero real numbers.
\begin{enumerate}
\item If there exist $r \in \R$ with $0<r<1$ and $K \in \N$ such that
\[\abs{\frac{x_{n+1}}{x_n}}\leq r\ \ \text{for}\ \ n \geq K,\]
then the series $\sum x_n$ is absolutely convergent.
\item If there exists $K \in \N$ such that
\[\abs{\frac{x_{n+1}}{x_n}}\geq 1\ \ \text{for}\ \ n \geq K,\]
then the series $\sum x_n$ is divergent.
\end{enumerate}
\end{theorem}
\begin{corollary}
Let $X :=(x_n)$ be a nonzero sequence in $\R$ and suppose that the limit
\[r:=\lim\abs{\frac{x_{n+1}}{x_n}}\]
exists in $\R$. Then $\sum x_n$ is absolutely convergent when $r<1$ and is divergent when $r>1$.
\end{corollary}
\begin{theorem}[\textbf{Integral Test}]
Let $f$ be a positive, decreasing function on $\{t:t\geq 1\}$. Then the series $\sum\limits_{k=1}^{\infty}f(k)$ converges if and only if the improper integral
\[\displaystyle\int_{1}^{\infty}f(t)dt=\lim\limits_{b \to \infty}\displaystyle\int_{1}^{b}f(t)dt\]
exists. In the case of convergence, the partial sum $s_n=\sum\limits_{k=1}^{n}f(k)$ and the sum $s=\sum\limits_{k=1}^{\infty} f(k)$ satisfy the estimate
\[\displaystyle\int_{n+1}^{\infty}f(t)dt\leq s-s_n \leq \displaystyle\int_{n}^{\infty}f(t)dt\]
\end{theorem}
\begin{theorem}[\textbf{Raabe's Test}]
Let $X:=(x_n)$ be a sequence of nonzero real numbers.
\begin{enumerate}
\item If there exist numbers $a >1$ and $K \in \N$ such that
\[\abs{\frac{x_{n+1}}{x_n}}\leq 1 - \frac{a}{n}\ \ \text{for}\ \ n \geq K,\]
then $\sum x_n$ is absolutely convergent.
\item If there exist real numbers $a \leq 1$ and $K \in \N$ such that
\[\abs{\frac{x_{n+1}}{x_n}}\geq 1-\frac{a}{n}\ \ \text{for}\ \ n \geq K,\]
then $\sum x_n$ is not absolutely convergent.
\end{enumerate}
\end{theorem}
\begin{corollary}
Let $X:=(x_n)$ be a nonzero sequence in $\R$ and let
\[a:=\lim \left(n \left(1-\abs{\frac{x_{n+1}}{x_n}}\right)\right)\]
whenever this limit exists. Then $\sum x_n$ is absolutely convergent when $a > 1$ and is not absolutely convergent when $a <1$.
\end{corollary}
@@ -0,0 +1,22 @@
\section{Tests for Nonabsolute Convergence}
\begin{definition}
A sequence $X:=(x_n)$ of nonzero real numbers is said to be \textbf{alternating} if the terms $(-1)^{n+1}x_n$, $n \in \N$, are all positive (or all negative) real numbers. If the sequence $X:=(x_n)$ is alternating, we say that the series $\sum x_n$ it generates is an \textbf{alternating series}.
\end{definition}
\begin{theorem}[\textbf{Alternating Series Test}]
Let $Z:=(z_n)$ be a decreasing sequence of strictly positive numbers with $\lim (z_n)=0$. Then the alternating series $\sum (-1)^{n+1} z_n$ is convergent.
\end{theorem}
\begin{lemma}[\textbf{Abel's Lemma}]
Let $X:=(x_n)$ and $Y:=(y_n)$ be sequences in $\R$ and let the partial sums of $\sum y_n$ be denoted by $(s_n)$ with $s_0:=0$. If $m >n$, then
\[\sum\limits_{k=n+1}^{m}x_ky_k=(x_ms_m-x_{n+1}s_n)+\sum\limits_{k=n+1}^{m-1}(x_k-x_{k+1})s_k\]
\end{lemma}
\begin{theorem}[\textbf{Dirichlet's Test}]
If $X:=(x_n)$ is a decreasing sequence with $\lim x_n=0$, and if the partial sums $(s_n)$ of $\sum y_n$ are bounded, then the series $\sum x_ny_n$ is convergent.
\end{theorem}
\begin{theorem}[\textbf{Abel's Test}]
If $X:=(x_n)$ is a convergent monotone sequence and the series $\sum y_n$ is convergent, then the series $\sum x_ny_n$ is also convergent.
\end{theorem}