% (extraction artifact removed: duplicated "447 lines / 17 KiB / TeX" file-metadata header)
\subsection{Introduction}
|
|
|
|
\begin{defn}[Matrix]\ \\
|
|
\begin{itemize}
|
|
\item $\begin{pmatrix}
|
|
a_{11} & a_{12} & ... & a_{1n} \\
|
|
a_{21} & a_{22} & ... & a_{2n} \\
|
|
\vdots \\
|
|
a_{m1} & a_{m2} & ... & a_{mn}
|
|
\end{pmatrix}$
|
|
\item $m$ is no of rows, $n$ is no of columns
|
|
\item size is $m \times n$
|
|
\item $A = (a_{ij})_{m \times n}$
|
|
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\subsubsection{Special Matrix}
|
|
|
|
\begin{note}[Special Matrices]\ \\
|
|
\begin{itemize}
|
|
\item Row Matrix : $\begin{pmatrix} 2 & 1 & 0 \end{pmatrix}$
|
|
\item Column Matrix
|
|
\subitem $\begin{pmatrix} 2 \\ 1 \\ 0 \end{pmatrix}$
|
|
\item \textbf{Square Matrix}, $n \times n$ matrix / matrix of order $n$.
|
|
\subitem Let $A = (a_{ij})$ be a square matrix of order $n$
|
|
\item Diagonal of $A$ is $a_{11}, a_{22}, ..., a_{nn}$.
|
|
\item \textbf{Diagonal Matrix} if Square Matrix and non-diagonal entries are zero
|
|
\subitem Diagonals can be zero
|
|
\subitem \textbf{Identity Matrix} is a special case of this
|
|
\item \textbf{Scalar Matrix} if Diagonal Matrix and diagonal entries are all the same.
|
|
\item \textbf{Identity Matrix} if Scalar Matrix and diagonal = 1
|
|
\subitem $I_n$ is the identity matrix of order $n$.
|
|
\item \textbf{Zero Matrix} if all entries are 0.
|
|
\subitem Can denote by either $\overrightarrow{0}, 0$
|
|
\item Square matrix is \textbf{symmetric} if symmetric wrt diagonal
|
|
\subitem $A = (a_{ij})_{n \times n}$ is symmetric $\iff a_{ij} = a_{ji},\ \forall i, j$
|
|
\item \textbf{Upper Triangular} if all entries \textbf{below} diagonal are zero.
|
|
\subitem $A = (a_{ij})_{n \times n}$ is upper triangular $\iff a_{ij} = 0 \text{ if } i > j$
|
|
\item \textbf{Lower Triangular} if all entries \textbf{above} diagonal are zero.
|
|
\label{def:ltm}
|
|
\subitem $A = (a_{ij})_{n \times n}$ is lower triangular $\iff a_{ij} = 0 \text{ if } i < j$
|
|
\subitem If a matrix is both Lower and Upper triangular, it is a Diagonal Matrix.
|
|
\end{itemize}
|
|
\end{note}
|
|
|
|
\subsection{Matrix Operations}
|
|
|
|
\begin{defn}[Matrix Operations]\ \\
|
|
Let $A = (a_{ij})_{m \times n}, B = (b_{ij})_{m \times n}$
|
|
\begin{itemize}
|
|
\item Equality: $B = (b_{ij})_{p \times q}$, $A = B \iff m = p \ \& \ n = q \ \& \ a_{ij} = b_{ij} \forall i,j$
|
|
\item Addition: $A + B = (a_{ij} + b_{ij})_{m \times n}$
|
|
\item Subtraction: $A - B = (a_{ij} - b_{ij})_{m \times n}$
|
|
\item Scalar Mult: $cA = (ca_{ij})_{m \times n}$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Matrix Multiplication] \ \\
|
|
Let $A = (a_{ij})_{m \times p}, B = (b_{ij})_{p \times n}$
|
|
\begin{itemize}
|
|
\item $AB$ is the $m \times n$ matrix s.t. $(i,j)$ entry is $$a_{i1}b_{1j} + a_{i2}b_{2j} + ... + a_{ip}b_{pj} = \sum^p_{k=1}a_{ik}b_{kj}$$
|
|
\item No of columns in $A$ = No of rows in $B$.
|
|
\item Matrix multiplication is \textbf{NOT commutative}
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{theorem}[Matrix Properties]\ \\
|
|
Let $A, B, C$ be $m \times p, p \times q, q \times n$ matrices
|
|
\begin{itemize}
|
|
\item Associative Law: $A(BC) = (AB)C$
|
|
\item Distributive Law: $A(B_1 + B_2) = AB_1 + AB_2$
|
|
\item Distributive Law: $(B_1 + B_2)A = B_1A + B_2A$
|
|
\item $c(AB) = (cA)B = A(cB)$
|
|
\item $A\textbf{0}_{p \times n} = \textbf{0}_{m \times n}$
|
|
\item $A\textbf{I}_{n} = \textbf{I}_{n}A = A$
|
|
\end{itemize}
|
|
\end{theorem}
|
|
|
|
|
|
\begin{defn}[Powers of Square Matrices]\ \\
|
|
Let $A$ be an $m \times n$ matrix.
|
|
|
|
$AA$ is well defined $\iff m = n \iff A$ is square.
|
|
|
|
\textbf{Definition.} Let $A$ be a square matrix of order $n$. Then the powers of $A$ are
|
|
$$
|
|
A^k = \begin{cases}
|
|
I_n & \text{if } k = 0 \\
|
|
AA...A & \text{if } k \geq 1.
|
|
\end{cases}
|
|
$$
|
|
|
|
\textbf{Properties.}
|
|
\begin{itemize}
|
|
\item $A^mA^n = A^{m+n}, (A^m)^n = A^{mn}$
|
|
\item $(AB)^2 = (AB)(AB) \neq A^2B^2 = (AA)(BB)$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
Matrix Multiplication Example:
|
|
|
|
\begin{itemize}
|
|
\item Let $A = \begin{pmatrix} 1 & 2 & 3 \\ 4 & 5 & 6 \end{pmatrix}$ and $B = \begin{pmatrix} 1 & 1 \\ 2 & 3 \\ -1 & -2 \end{pmatrix}$
|
|
\item Let $a_1 = \begin{pmatrix}1 & 2 & 3 \end{pmatrix}, a_2 = \begin{pmatrix}4 & 5 & 6 \end{pmatrix}$
|
|
\item $AB = \begin{pmatrix} a_1 & a_2 \end{pmatrix}B = \begin{pmatrix} a_1B \\ a_2B \end{pmatrix}$.
|
|
\item $\begin{pmatrix}
|
|
\begin{pmatrix}1 & 2 & 3 \end{pmatrix} & \begin{pmatrix} 1 & 1 \\ 2 & 3 \\ -1 & -2 \end{pmatrix} \\
|
|
\begin{pmatrix}4 & 5 & 6 \end{pmatrix} & \begin{pmatrix} 1 & 1 \\ 2 & 3 \\ -1 & -2 \end{pmatrix}
|
|
\end{pmatrix} = \begin{pmatrix}
|
|
\begin{pmatrix}2 & 1\end{pmatrix} \\
|
|
\begin{pmatrix}8 & 7\end{pmatrix} \\
|
|
\end{pmatrix}
|
|
$
|
|
\end{itemize}
|
|
|
|
\begin{note}[Representation of Linear System] \ \\
|
|
\begin{itemize}
|
|
\item $\begin{cases}
|
|
a_{11}x_1 + a_{12}x_2 + ... + a_{1n}x_n & = b_1 \\
|
|
a_{21}x_1 + a_{22}x_2 + ... + a_{2n}x_n & = b_2 \\
|
|
\vdots & \vdots \\
|
|
a_{m1}x_1 + a_{m2}x_2 + ... + a_{mn}x_n & = b_m \\
|
|
\end{cases}$
|
|
|
|
\item A = $\begin{pmatrix}
|
|
a_{11} & a_{12} & ... & a_{1n} \\
|
|
a_{21} & a_{22} & ... & a_{2n} \\
|
|
\vdots & \vdots & & \vdots \\
|
|
a_{m1} & a_{m2} & ... & a_{mn} \\
|
|
\end{pmatrix}$, Coefficient Matrix, $A_{m\times n}$
|
|
\item $x = \begin{pmatrix}
|
|
x_{1} \\
|
|
\vdots \\
|
|
x_{n} \\
|
|
\end{pmatrix}$, Variable Matrix, $x_{n \times 1}$
|
|
\item $b = \begin{pmatrix}
|
|
b_{1} \\
|
|
\vdots \\
|
|
b_{m} \\
|
|
\end{pmatrix}$, Constant Matrix, $b_{m \times 1}$. Then $Ax = b$
|
|
\item $A = (a_{ij})_{m\times n} $
|
|
\item $m$ linear equations in $n$ variables, $x_1, ..., x_n$
|
|
\item $a_{ij}$ are coefficients, $b_i$ are the constants
|
|
\item Let $u = \begin{pmatrix} u_1 \\ \vdots \\ u_n \end{pmatrix}$.
|
|
\subitem $x_1 = u_1, \hdots, x_n = u_n$ is a solution to the system
|
|
\subitem $\iff Au = b \iff u$ is a solution to $Ax = b$
|
|
\item Let $a_j$ denote the $j$th column of $A$. Then
|
|
\subitem $b = Ax = x_1a_1 + ... + x_na_n = \sum^n_{j=1}x_ja_j$
|
|
\end{itemize}
|
|
\end{note}
|
|
|
|
\begin{defn}[Transpose]\ \\
|
|
\begin{itemize}
|
|
\item Let $A = (a_{ij})_{m\times n}$
|
|
\item The transpose of $A$ is $A^T = (a_{ji})_{n \times m}$
|
|
\item $(A^T)^T = A$
|
|
\item A is symmetric $\iff A = A^T$
|
|
\item Let $B$ be $m \times n$, $(A+B)^T = A^T + B^T$
|
|
\item Let $B$ be $n \times p$, $(AB)^T = B^TA^T$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Inverse]\ \\
|
|
\begin{itemize}
|
|
\item Let $A, B$ be matrices of same size
|
|
\subitem $A + X = B \implies X = B - A = B + (-A)$
|
|
\subitem $-A$ is the \textit{additive inverse} of $A$
|
|
\item Let $A_{m\times n}, B_{m\times p}$ matrix.
|
|
\subitem $AX = B \implies X = A^{-1}B$.
|
|
\end{itemize}
|
|
|
|
|
|
Let $A$ be a \textbf{square matrix} of order $n$.
|
|
\begin{itemize}
|
|
\item If there exists a square matrix $B$ of order $n$ s.t. $AB = I_{n}$ and $BA = I_{n}$, then $A$ is an \textbf{invertible} matrix and $B$ is an inverse of $A$.
|
|
\item If $A$ is not invertible, $A$ is called singular.
|
|
\item suppose $A$ is invertible with inverse $B$
|
|
\item Let $C$ be any matrix having the same number of rows as $A$.
|
|
$$\begin{aligned}
|
|
AX = C &\implies B(AX) = BC \\
|
|
&\implies (BA)X = BC \\
|
|
&\implies X = BC.
|
|
\end{aligned}$$
|
|
\end{itemize}
|
|
|
|
|
|
\end{defn}
|
|
|
|
\begin{theorem}[Properties of Inversion]\ \\
|
|
Let $A$ be a square matrix.
|
|
\begin{itemize}
|
|
\item Let $A$ be an invertible matrix, then its inverse is unique.
|
|
\item Cancellation Law: Let $A$ be an invertible matrix
|
|
\subitem $AB_1 = AB_2 \implies B_1 = B_2$
|
|
\subitem $C_1A = C_2A \implies C_1 = C_2$
|
|
\subitem $AB = 0 \implies B = 0, CA = 0 \implies C = 0$ ($A$ is invertible, A cannot be 0)
|
|
\subitem This fails if $A$ is singular
|
|
\item Let $A = \begin{pmatrix} a & b \\ c & d \end{pmatrix}$
|
|
\subitem $A$ is invertible $\iff ad - bc \neq 0$
|
|
\subitem If $A$ is invertible, then $A^{-1} = \dfrac{1}{ad - bc} \begin{pmatrix}d & -b \\ -c & a \end{pmatrix}$
|
|
|
|
\end{itemize}
|
|
Let $A$ and $B$ be invertible matrices of same order
|
|
\begin{itemize}
|
|
\item Let $c \neq 0$. Then $cA$ is invertible, $(cA)^{-1} = \frac{1}{c}A^{-1}$
|
|
\item $A^T$ is invertible, $(A^T)^{-1} = (A^{-1})^T$
|
|
\item $AB$ is invertible, $(AB)^{-1} = (B^{-1}A^{-1})$
|
|
\end{itemize}
|
|
|
|
Let $A$ be an invertible matrix.
|
|
|
|
\begin{itemize}
|
|
\item $A^{-k} = (A^{-1})^k$
|
|
\item $A^{m+n} = A^mA^n$
|
|
\item $(A^m)^n = A^{mn}$
|
|
\end{itemize}
|
|
|
|
|
|
\end{theorem}
|
|
|
|
\begin{defn}[Elementary Matrices] If it can be obtained from $I$ by performing single elementary row operation
|
|
\begin{itemize}
|
|
\item $cR_i, c \neq 0: \begin{pmatrix}
|
|
1 & 0 & 0 & 0 \\
|
|
0 & 1 & 0 & 0 \\
|
|
0 & 0 & c & 0 \\
|
|
0 & 0 & 0 & 1
|
|
\end{pmatrix}(cR_3)$
|
|
\item $R_i \leftrightarrow R_j, i \neq j,: \begin{pmatrix}
|
|
1 & 0 & 0 & 0 \\
|
|
0 & 0 & 0 & 1 \\
|
|
0 & 0 & 1 & 0 \\
|
|
0 & 1 & 0 & 0
|
|
\end{pmatrix}(R_2 \leftrightarrow R_4)$
|
|
\item $R_i + cR_j, i \neq j,: \begin{pmatrix}
|
|
1 & 0 & 0 & 0 \\
|
|
0 & 1 & 0 & c \\
|
|
0 & 0 & 1 & 0 \\
|
|
0 & 0 & 0 & 1
|
|
\end{pmatrix}(R_2 + cR_4)$
|
|
\item Every elementary Matrix is invertible
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
$A = \begin{pmatrix}
|
|
a_{11}&a_{12}&a_{13}\\
|
|
a_{21}&a_{22}&a_{23}\\
|
|
a_{31}&a_{32}&a_{33}\\
|
|
a_{41}&a_{42}&a_{43}
|
|
\end{pmatrix}$, $E = \begin{pmatrix}
|
|
1&0&0&0\\
|
|
0&1&0&0\\
|
|
0&0&c&0\\
|
|
0&0&0&1
|
|
\end{pmatrix}(cR_3)$, $EA = \begin{pmatrix}
|
|
a_{11}&a_{12}&a_{13}\\
|
|
a_{21}&a_{22}&a_{23}\\
|
|
ca_{31}&ca_{32}&ca_{33}\\
|
|
a_{41}&a_{42}&a_{43}
|
|
\end{pmatrix}$
|
|
|
|
\begin{theorem} Main Theorem for Invertible Matrices \\
|
|
Let $A$ be a square matrix. Then the following are equivalent
|
|
\begin{enumerate}
|
|
\item $A$ is an invertible matrix.
|
|
\item Linear System $Ax = b$ has a unique solution
|
|
\item Linear System $Ax = 0$ has only the trivial solution
|
|
\item RREF of $A$ is $I$
|
|
\item A is the product of elementary matrices
|
|
\end{enumerate}
|
|
\end{theorem}
|
|
|
|
\begin{theorem} Find Inverse
|
|
\begin{itemize}
|
|
\item Let $A$ be an invertible Matrix.
|
|
\item RREF of $(A | I)$ is $(I | A^{-1})$
|
|
\end{itemize}
|
|
|
|
How to identify if Square Matrix is invertible?
|
|
|
|
\begin{itemize}
|
|
\item Square matrix is invertible
|
|
\subitem $\iff$ RREF is $I$
|
|
\subitem $\iff$ All columns in its REF are pivot
|
|
\subitem $\iff$ All rows in REF are nonzero
|
|
\item Square matrix is singular
|
|
\subitem $\iff$ RREF is \textbf{NOT} $I$
|
|
\subitem $\iff$ Some columns in its REF are non-pivot
|
|
\subitem $\iff$ Some rows in REF are zero.
|
|
\item $A$ and $B$ are square matrices such that $AB = I$
|
|
\subitem then $A$ and $B$ are invertible
|
|
\end{itemize}
|
|
\end{theorem}
|
|
|
|
\begin{defn}[LU Decomposition with Type 3 Operations]\ \\
|
|
\begin{itemize}
|
|
\item Type 3 Operations: $(R_i + cR_j, i > j)$
|
|
\item Let $A$ be a $m \times n$ matrix. Consider Gaussian Elimination $A \dashrightarrow R$
|
|
\item Let $R \dashrightarrow A$ be the operations in reverse
|
|
\item Apply the same operations to $I_m \dashrightarrow L$. Then $A = LR$
|
|
\item $L$ is a \hyperref[def:ltm]{lower triangular matrix} with 1 along diagonal
|
|
\item If $A$ is square matrix, $R = U$
|
|
\end{itemize}
|
|
|
|
Application:
|
|
\begin{itemize}
|
|
\item $A$ has LU decomposition $A = LU$, $Ax = b$ i.e., $LUx = b$
|
|
\item Let $y = Ux$, then it is reduced to $Ly = b$
|
|
\item $Ly = b$ can be solved with forward substitution.
|
|
\item $Ux = y$ is the REF of A.
|
|
\item $Ux = y$ can be solved using backward substitution.
|
|
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[LU Decomposition with Type II Operations]\ \\
|
|
\begin{itemize}
|
|
\item Type 2 Operations: $(R_i \leftrightarrow R_j)$, where 2 rows are swapped
|
|
\item $A \xrightarrow[]{E_1} \bullet \xrightarrow[]{E_2}\bullet \xrightarrow[E_3]{R_i \leftrightarrow R_j}\bullet \xrightarrow[]{E_4}\bullet \xrightarrow[]{E_5} R$
|
|
\item $A = E^{-1}_1E^{-1}_2E^{}_3E^{-1}_4E^{-1}_5R$
|
|
\item $E_3A = (E_3E^{-1}_1E^{-1}_2E_3)E^{-1}_4E^{-1}_5R$
|
|
\item $P = E_3, L = (E_3E^{-1}_1E^{-1}_2E_3)E^{-1}_4E^{-1}_5, R = U$, $PA = LU$
|
|
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Column Operations]\ \\
|
|
\begin{itemize}
|
|
\item Pre-multiplication of Elementary matrix $\iff$ Elementary row operation
|
|
\subitem $A \to B \iff B = E_1E_2...E_kA$
|
|
\item Post-Multiplication of Elementary matrix $\iff$ Elementary Column Operation
|
|
\subitem $A \to B \iff B = AE_1E_2...E_k$
|
|
\item If $E$ is obtained from $I_n$ by single elementary column operation, then
|
|
\subitem $I \xrightarrow[]{kC_i}E \iff I \xrightarrow[]{kR_i}E$
|
|
\subitem $I \xrightarrow[]{C_i \leftrightarrow C_j}E \iff I \xrightarrow[]{R_i \leftrightarrow R_j}E$
|
|
\subitem $I \xrightarrow[]{C_i + kC_j}E \iff I \xrightarrow[]{R_j + kR_i}E$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\subsection{Determinants}
|
|
|
|
\begin{defn}[Determinants of $2 \times 2$ Matrix]\ \\
|
|
\begin{itemize}
|
|
\item Let $A = \begin{pmatrix} a & b \\ c & d \end{pmatrix}$
|
|
\item $\det(A) = |A| = ad - bc$
|
|
\end{itemize}
|
|
Solving Linear equations with determinants for $2 \times 2$
|
|
\begin{itemize}
|
|
\item $x_1 =
|
|
\dfrac{\begin{vmatrix} b_1 & a_{12} \\ b_2 & a_{22} \end{vmatrix}}
|
|
{\begin{vmatrix}a_{11} & a_{12} \\ a_{21} & a_{22} \end{vmatrix}}$, $x_2 =
|
|
\dfrac{\begin{vmatrix} a_{11} & b_1 \\ a_{21} & b_2 \end{vmatrix}}
|
|
{\begin{vmatrix}a_{11} & a_{12} \\ a_{21} & a_{22} \end{vmatrix}}$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Determinants]\ \\
|
|
\begin{itemize}
|
|
\item Suppose $A$ is invertible, then there exists EROs such that
|
|
\item $A \xrightarrow{ero_1} A_1 \rightarrow ... \rightarrow A_{k-1} \xrightarrow{ero_k}A_k = I$
|
|
\item Then $\det(A)$ can be evaluated backwards.
|
|
\subitem E.g.\ $A \xrightarrow{R_1 \leftrightarrow R_3} \bullet \xrightarrow{3R_2} \bullet \xrightarrow{R_2 + 2R_4} I \implies \det(A) = 1 \to 1 \to \frac{1}{3} \to -\frac{1}{3}$
|
|
\item Let $M_{ij}$ be submatrix where the $i$th row and $j$th column are deleted
|
|
\item Let $A_{ij} = (-1)^{i+j}\det(M_{ij})$, which is the $(i, j)$-cofactor
|
|
\item $\det(A) = a_{11}A_{11} + a_{12}A_{12} + ... + a_{1n}A_{1n}$
|
|
|
|
\item $\det(I) = 1$
|
|
\item $A \xrightarrow{cR_i} B \implies \det(B) = c\det(A)$
|
|
\subitem $I \xrightarrow{cR_i} E \implies \det(E) = c$
|
|
\item $A \xrightarrow{R_1 \leftrightarrow R_2} B \implies \det(B) = -\det(A)$
|
|
\subitem $I \xrightarrow{R_1 \leftrightarrow R_2} E \implies \det(E) = -1$
|
|
\item $A \xrightarrow{R_i + cR_j} B \implies \det(B) = \det(A), i \neq j$
|
|
\subitem $I \xrightarrow{R_i + cR_j} E \implies \det(E) = 1$
|
|
\item $\det(EA) = \det(E)\det(A)$
|
|
\end{itemize}
|
|
|
|
Calculating determinants easier
|
|
\begin{itemize}
|
|
\item Let $A$ be square matrix. Apply Gaussian Elimination to get REF $R$
|
|
\item $A \xrightarrow{E_1} \bullet \xrightarrow{E_2} \bullet ... \bullet \xrightarrow{E_k} R$
|
|
\item $A \xleftarrow{E^{-1}_1} \bullet \xleftarrow{E^{-1}_2} \bullet ... \bullet \xleftarrow{E^{-1}_k} R$
|
|
\item Since each $E_i$ (and its inverse $E^{-1}_i$) is type $II$ or $III$, $\det(E_i) = -1$ (type $II$) or $\det(E_i) = 1$ (type $III$)
|
|
\subitem $\det(A) = (-1)^t\det(R)$, where $t$ is the no of type $II$ (row swap) operations
|
|
\item If $A$ is singular, then $R$ has a zero row, and then $\det(A) = 0$
|
|
\item If A is invertible, then all rows of $R$ are nonzero
|
|
\subitem $\det(R) = r_{11}r_{22}\cdots r_{nn}$, the product of the diagonal entries of $R$.
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\subsection{Recap}
|
|
\begin{itemize}
|
|
\item If A has a REF
|
|
\subitem If there is a zero row $\implies$ Singular matrix
|
|
\subitem All rows are nonzero $\implies$ invertible Matrix
|
|
\item If A is invertible, Using Gauss Jordan Elim $(A | I) \to (I | A^{-1})$
|
|
\item
|
|
\end{itemize}
|
|
|
|
\subsection{More about Determinants}
|
|
|
|
\begin{defn}[Determinant Properties]\ \\
|
|
$A$ is a Square Matrix
|
|
\begin{itemize}
|
|
\item $\det(A) = 0 \implies A$ is singular
|
|
\item $\det(A) \neq 0 \implies A$ is invertible
|
|
\item $\det(A) = \det(A^T)$
|
|
\item $\det(cA) = c^n\det(A)$, where $n$ is the order of the matrix
|
|
\item If $A$ is triangular, $\det(A)$ product of diagonal entries
|
|
\item $\det(AB) = \det(A)\det(B)$
|
|
\item $\det(A^{-1}) = [\det(A)]^{-1}$
|
|
\end{itemize}
|
|
|
|
Cofactor Expansion:
|
|
\begin{itemize}
|
|
\item To evaluate a determinant using cofactor expansion, expand along the row/column with the most no of zeros.
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\subsection{Finding Determinants TLDR}
|
|
\begin{defn}[Finding Determinants]\ \\
|
|
\begin{itemize}
|
|
\item If $A$ has zero row / column, $\det(A) = 0$
|
|
\item If $A$ is triangular, $\det(A) = a_{11}a_{22}...a_{nn}$
|
|
\item If Order $n = 2 \to \det(A) = a_{11}a_{22} - a_{12}a_{21}$
|
|
|
|
\item If row/column has many 0, use cofactor expansion
|
|
\item Use Gaussian Elimination to get REF
|
|
\subitem $\det(A) = (-1)^t\det(R), t$ is no of type $II$ operations
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Finding Inverse with Adjoint Matrix]\ \\
|
|
\begin{itemize}
|
|
\item $\text{adj}(A) = (A_{ji})_{n\times n} = (A_{ij})^T_{n\times n}$
|
|
\item $A^{-1} = [\det(A)]^{-1}\text{adj}(A)$
|
|
\end{itemize}
|
|
\end{defn}
|
|
|
|
\begin{defn}[Cramer's Rule] Suppose $A$ is an invertible matrix of order $n$
|
|
\begin{itemize}
|
|
\item Linear system $Ax = b$ has a unique solution
|
|
\item $x = \dfrac{1}{\det(A)}\begin{pmatrix}\det(A_1) \\ \vdots \\ \det(A_n) \end{pmatrix}$,
|
|
\item $A_j$ is obtained by replacing the $j$th column in $A$ with $b$.
|
|
\end{itemize}
|
|
\end{defn}
|