diff --git a/ma1522/1522 Notes.pdf b/ma1522/1522 Notes.pdf
index dc3bd31..cc71dee 100644
Binary files a/ma1522/1522 Notes.pdf and b/ma1522/1522 Notes.pdf differ
diff --git a/ma1522/1522 Notes.tex b/ma1522/1522 Notes.tex
index aabd79d..e13cf01 100644
--- a/ma1522/1522 Notes.tex
+++ b/ma1522/1522 Notes.tex
@@ -54,6 +54,34 @@
 \hr
 \input{lec_06.tex}
 \newpage
+\section{Lecture 7}
+\hr
+\input{lec_07.tex}
+\newpage
+\section{Lecture 8}
+\hr
+\input{lec_08.tex}
+\newpage
+\section{Lecture 9}
+\hr
+\input{lec_09.tex}
+\newpage
+\section{Lecture 10}
+\hr
+\input{lec_10.tex}
+\newpage
+\section{Lecture 11}
+\hr
+\input{lec_11.tex}
+\newpage
+\section{Lecture 12}
+\hr
+\input{lec_12.tex}
+\newpage
+\section{Lecture 13}
+\hr
+\input{lec_13.tex}
+\newpage
 \section{Reference}
diff --git a/ma1522/lec_04.tex b/ma1522/lec_04.tex
index 5187958..bedf46b 100644
--- a/ma1522/lec_04.tex
+++ b/ma1522/lec_04.tex
@@ -63,6 +63,7 @@
 \item \textbf{Upper Triangular} if all entries \textbf{below} diagonal are zero.
 \subitem $A = (a_{ij})_{n \times n}$ is upper triangular $\iff a_{ij} = 0 \text{ if } i > j$
 \item \textbf{Lower Triangular} if all entries \textbf{above} diagonal are zero.
+    \label{def:ltm}
 \subitem $A = (a_{ij})_{n \times n}$ is lower triangular $\iff a_{ij} = 0 \text{ if } i < j$
 \subitem if Matrix is both Lower and Upper triangular, its a Diagonal Matrix.
 \end{itemize}
diff --git a/ma1522/lec_07.tex b/ma1522/lec_07.tex
new file mode 100644
index 0000000..695f449
--- /dev/null
+++ b/ma1522/lec_07.tex
@@ -0,0 +1,107 @@
+\begin{theorem} Main Theorem for Invertible Matrices \\
+    Let $A$ be a square matrix. Then the following statements are equivalent:
+    \begin{enumerate}
+        \item $A$ is an invertible matrix.
+        \item The linear system $Ax = b$ has a unique solution for every $b$.
+        \item The linear system $Ax = 0$ has only the trivial solution.
+        \item The RREF of $A$ is $I$.
+        \item $A$ is a product of elementary matrices.
+    \end{enumerate}
+\end{theorem}
+
+\begin{theorem} Finding the Inverse
+    \begin{itemize}
+        \item Let $A$ be an invertible matrix.
+        \item The RREF of $(A | I)$ is $(I | A^{-1})$ (worked example at the end of this lecture).
+    \end{itemize}
+
+    How to identify whether a square matrix is invertible:
+
+    \begin{itemize}
+        \item A square matrix is invertible
+        \subitem $\iff$ its RREF is $I$
+        \subitem $\iff$ all columns in its REF are pivot columns
+        \subitem $\iff$ all rows in its REF are nonzero
+        \item A square matrix is singular
+        \subitem $\iff$ its RREF is \textbf{NOT} $I$
+        \subitem $\iff$ some columns in its REF are non-pivot columns
+        \subitem $\iff$ some rows in its REF are zero
+        \item If $A$ and $B$ are square matrices such that $AB = I$,
+        \subitem then $A$ and $B$ are both invertible, with $B = A^{-1}$ and $A = B^{-1}$
+    \end{itemize}
+\end{theorem}
+
+\begin{defn}[LU Decomposition with Type 3 Operations]\ \\
+    \begin{itemize}
+        \item Type 3 operations: $(R_i + cR_j, \; i > j)$
+        \item Let $A$ be an $m \times n$ matrix. Consider Gaussian elimination $A \dashrightarrow R$ (an REF of $A$)
+        \item Let $R \dashrightarrow A$ be the same operations reversed: each $R_i + cR_j$ becomes $R_i - cR_j$, applied in the opposite order
+        \item Apply these reverse operations to $I_m \dashrightarrow L$. Then $A = LR$
+        \item $L$ is a \hyperref[def:ltm]{lower triangular matrix} with 1's along the diagonal
+        \item If $A$ is a square matrix, $R$ is upper triangular and we write $R = U$
+    \end{itemize}
+
+    Application:
+    \begin{itemize}
+        \item If $A$ has LU decomposition $A = LU$, then $Ax = b$ becomes $LUx = b$
+        \item Let $y = Ux$; the system reduces to $Ly = b$
+        \item $Ly = b$ can be solved with forward substitution
+        \item $Ux = y$ corresponds to the REF of $A$
+        \item $Ux = y$ can be solved using backward substitution (worked example at the end of this lecture)
+
+    \end{itemize}
+\end{defn}
+
+\begin{defn}[LU Decomposition with Type 2 Operations]\ \\
+    \begin{itemize}
+        \item Type 2 operations: $(R_i \leftrightarrow R_j)$, where two rows are swapped
+        \item $A \xrightarrow[]{E_1} \bullet \xrightarrow[]{E_2} \bullet \xrightarrow[E_3]{R_i \leftrightarrow R_j} \bullet \xrightarrow[]{E_4} \bullet \xrightarrow[]{E_5} R$
+        \item $A = E^{-1}_1E^{-1}_2E_3E^{-1}_4E^{-1}_5R$ (here $E^{-1}_3 = E_3$, since a row swap is its own inverse)
+        \item $E_3A = (E_3E^{-1}_1E^{-1}_2E_3)E^{-1}_4E^{-1}_5R$ (insert $E_3E_3 = I$ after $E^{-1}_2$)
+        \item Set $P = E_3$, $L = (E_3E^{-1}_1E^{-1}_2E_3)E^{-1}_4E^{-1}_5$ and $U = R$; then $PA = LU$ (worked example at the end of this lecture)
+    \end{itemize}
+\end{defn}
+
+\begin{defn}[Column Operations]\ \\
+    \begin{itemize}
+        \item Pre-multiplication by elementary matrices $\iff$ elementary row operations
+        \subitem $A \to B \iff B = E_k...E_2E_1A$ (row operations applied in the order $E_1, E_2, ..., E_k$)
+        \item Post-multiplication by elementary matrices $\iff$ elementary column operations (worked example at the end of this lecture)
+        \subitem $A \to B \iff B = AE_1E_2...E_k$ (column operations applied in the order $E_1, E_2, ..., E_k$)
+        \item If $E$ is obtained from $I_n$ by a single elementary column operation, then
+        \subitem $I \xrightarrow[]{kC_i}E \iff I \xrightarrow[]{kR_i}E$
+        \subitem $I \xrightarrow[]{C_i \leftrightarrow C_j}E \iff I \xrightarrow[]{R_i \leftrightarrow R_j}E$
+        \subitem $I \xrightarrow[]{C_i + kC_j}E \iff I \xrightarrow[]{R_j + kR_i}E$
+    \end{itemize}
+\end{defn}
+
+\subsection{Determinants}
+
+\begin{defn}[Determinants of $2 \times 2$ Matrix]\ \\
+    \begin{itemize}
+        \item Let $A = \begin{pmatrix} a & b \\ c & d \end{pmatrix}$
+        \item $\det(A) = |A| = ad - bc$
+        \item $\det(I_2) = 1$
+        \item $A \xrightarrow{cR_i} B \implies \det(B) = c\det(A)$
+        \item $A \xrightarrow{R_1 \leftrightarrow R_2} B \implies \det(B) = -\det(A)$
+        \item $A \xrightarrow{R_i + cR_j} B \implies \det(B) = \det(A), \; i \neq j$
+    \end{itemize}
+    Solving the $2 \times 2$ linear system $a_{11}x_1 + a_{12}x_2 = b_1$, $a_{21}x_1 + a_{22}x_2 = b_2$ with determinants (Cramer's rule; worked example at the end of this lecture):
+    \begin{itemize}
+        \item $x_1 =
+        \dfrac{\begin{vmatrix} b_1 & a_{12} \\ b_2 & a_{22} \end{vmatrix}}
+        {\begin{vmatrix}a_{11} & a_{12} \\ a_{21} & a_{22} \end{vmatrix}}$, $x_2 =
+        \dfrac{\begin{vmatrix} a_{11} & b_1 \\ a_{21} & b_2 \end{vmatrix}}
+        {\begin{vmatrix}a_{11} & a_{12} \\ a_{21} & a_{22} \end{vmatrix}}$
+        \subitem valid provided $\begin{vmatrix}a_{11} & a_{12} \\ a_{21} & a_{22} \end{vmatrix} \neq 0$, i.e.\ the coefficient matrix is invertible
+    \end{itemize}
+\end{defn}
+
+\begin{defn}[Determinants via Elementary Row Operations]\ \\
+    \begin{itemize}
+        \item Suppose $A$ is invertible. Then there exist EROs such that
+        \item $A \xrightarrow{\text{ero}_1} A_1 \rightarrow ... \rightarrow A_{k-1} \xrightarrow{\text{ero}_k} A_k = I$
+        \item Then $\det(A)$ can be evaluated by working backwards from $\det(I) = 1$, undoing one operation at a time.
+        \subitem E.g. $A \xrightarrow{R_1 \leftrightarrow R_3} \bullet \xrightarrow{3R_2} \bullet \xrightarrow{R_2 + 2R_4} I$: the determinants, read backwards from $I$, are $1 \to 1 \to \frac{1}{3} \to -\frac{1}{3}$, so $\det(A) = -\frac{1}{3}$
+    \end{itemize}
+\end{defn}
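+
+\subsection{Worked Examples}
+
+% The examples in this subsection are illustrative sketches added alongside the notes;
+% the matrices and right-hand sides are chosen arbitrarily and are not taken from the lecture.
+\textbf{Finding an inverse via the RREF of $(A | I)$.} Take $A = \begin{pmatrix} 1 & 2 \\ 3 & 4 \end{pmatrix}$:
+\[
+\left(\begin{array}{cc|cc} 1 & 2 & 1 & 0 \\ 3 & 4 & 0 & 1 \end{array}\right)
+\xrightarrow{R_2 - 3R_1}
+\left(\begin{array}{cc|cc} 1 & 2 & 1 & 0 \\ 0 & -2 & -3 & 1 \end{array}\right)
+\xrightarrow{-\frac{1}{2}R_2}
+\left(\begin{array}{cc|cc} 1 & 2 & 1 & 0 \\ 0 & 1 & \frac{3}{2} & -\frac{1}{2} \end{array}\right)
+\]
+\[
+\xrightarrow{R_1 - 2R_2}
+\left(\begin{array}{cc|cc} 1 & 0 & -2 & 1 \\ 0 & 1 & \frac{3}{2} & -\frac{1}{2} \end{array}\right),
+\]
+so $A^{-1} = \begin{pmatrix} -2 & 1 \\ \frac{3}{2} & -\frac{1}{2} \end{pmatrix}$; multiplying back confirms $AA^{-1} = I_2$.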
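+
+% Illustrative LU example; A and b below are arbitrary choices, not from the lecture.
+\textbf{LU decomposition and solving $Ax = b$.} With
+$A = \begin{pmatrix} 2 & 1 & 1 \\ 4 & 3 & 3 \\ 8 & 7 & 9 \end{pmatrix}$,
+the type 3 operations $R_2 - 2R_1$, $R_3 - 4R_1$, $R_3 - 3R_2$ give
+\[
+U = \begin{pmatrix} 2 & 1 & 1 \\ 0 & 1 & 1 \\ 0 & 0 & 2 \end{pmatrix}, \qquad
+L = \begin{pmatrix} 1 & 0 & 0 \\ 2 & 1 & 0 \\ 4 & 3 & 1 \end{pmatrix},
+\]
+where the entries of $L$ below the diagonal are exactly the multipliers $2, 4, 3$ used above, and one can check $LU = A$.
+For $b = (4, 10, 26)^T$: forward substitution in $Ly = b$ gives $y = (4, 2, 4)^T$, and backward
+substitution in $Ux = y$ gives $x = (1, 0, 2)^T$, which indeed satisfies $Ax = b$.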
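+
+% Illustrative PA = LU example with one row swap; the matrix is an arbitrary choice.
+\textbf{$PA = LU$ when a row swap is needed.} For
+$A = \begin{pmatrix} 1 & 1 & 1 \\ 2 & 2 & 3 \\ 3 & 4 & 5 \end{pmatrix}$,
+the operations $R_2 - 2R_1$ and $R_3 - 3R_1$ leave a zero pivot in position $(2,2)$,
+so a swap $R_2 \leftrightarrow R_3$ is needed. Collecting the swap into $P$ and eliminating on $PA$ gives
+\[
+P = \begin{pmatrix} 1 & 0 & 0 \\ 0 & 0 & 1 \\ 0 & 1 & 0 \end{pmatrix}, \qquad
+PA = \begin{pmatrix} 1 & 1 & 1 \\ 3 & 4 & 5 \\ 2 & 2 & 3 \end{pmatrix}
+= \begin{pmatrix} 1 & 0 & 0 \\ 3 & 1 & 0 \\ 2 & 0 & 1 \end{pmatrix}
+\begin{pmatrix} 1 & 1 & 1 \\ 0 & 1 & 2 \\ 0 & 0 & 1 \end{pmatrix} = LU.
+\]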
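+
+% Small illustration of a column operation as post-multiplication; k is a generic scalar.
+\textbf{Column operation as post-multiplication.} The column operation $C_2 + kC_1$ applied to $I_2$
+gives the same $E$ as the row operation $R_1 + kR_2$, namely $E = \begin{pmatrix} 1 & k \\ 0 & 1 \end{pmatrix}$,
+and post-multiplying by $E$ performs the column operation on any $A$:
+\[
+\begin{pmatrix} a & b \\ c & d \end{pmatrix}
+\begin{pmatrix} 1 & k \\ 0 & 1 \end{pmatrix}
+= \begin{pmatrix} a & ka + b \\ c & kc + d \end{pmatrix},
+\]
+i.e.\ the second column of $A$ has been replaced by $kC_1 + C_2$.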
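+
+% Illustrative 2 x 2 system; the coefficients are arbitrary choices.
+\textbf{Solving a $2 \times 2$ system with determinants.} For $x_1 + 2x_2 = 5$ and $3x_1 + 4x_2 = 6$,
+\[
+x_1 = \dfrac{\begin{vmatrix} 5 & 2 \\ 6 & 4 \end{vmatrix}}{\begin{vmatrix} 1 & 2 \\ 3 & 4 \end{vmatrix}}
+    = \dfrac{20 - 12}{4 - 6} = -4, \qquad
+x_2 = \dfrac{\begin{vmatrix} 1 & 5 \\ 3 & 6 \end{vmatrix}}{\begin{vmatrix} 1 & 2 \\ 3 & 4 \end{vmatrix}}
+    = \dfrac{6 - 15}{-2} = \dfrac{9}{2},
+\]
+and substituting back confirms both equations; the formula applies because the denominator $4 - 6 = -2$ is nonzero.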