This commit is contained in:
bluepython508
2024-01-22 10:56:12 +00:00
parent d320a956f8
commit dc42625cf1
20 changed files with 293 additions and 10 deletions

View File

@@ -45,7 +45,7 @@ Solutions invariant.
\end{align*}
\section*{As Matrices}
\begin{align*}
\systeme{
& \systeme{
x + 2y = 1,
2x - y = 3
}
@@ -53,8 +53,8 @@ Solutions invariant.
\begin{pmatrix}[cc|c]
1 & 2 & 1 \\
2 & -1 & 3
\end{pmatrix}
& \systeme{
\end{pmatrix} \\
& \systeme{
x - y + z = -2,
2x + 3y + z = 7,
x - 2y - z = -2
@@ -347,12 +347,107 @@ Inversion of a linear transformation is equivalent to inversion of its represent
\[ \forall A: \text{$n \times n$ matrix}.\quad P_A\paren{\lambda} = \det{\paren{A - \lambda I_n}} \tag{characteristic polynomial in $\lambda$}\]
Eigenvalues of $A$ are the solutions of $P_A\paren{\lambda} = 0$
\begin{align*}
& A\vec{x} = \lambda\vec{x} & x \neq 0\\
\iff & A\vec{x} - \lambda\vec{x} = 0 \\
\iff & (A - \lambda I_n)\vec{x} = 0 \\
\iff & \det{\paren{A - \lambda I_n}} = 0 \\
& \quad \text{ or $\paren{A - \lambda I_n}$ is invertible and $x = 0$ }
& A\vec{x} = \lambda\vec{x} & x \neq 0 \\
\iff & A\vec{x} - \lambda\vec{x} = 0 \\
\iff & (A - \lambda I_n)\vec{x} = 0 \\
\iff & \det{\paren{A - \lambda I_n}} = 0 \\
& \quad \text{ or $\paren{A - \lambda I_n}$ is invertible and $x = 0$ }
\end{align*}
\[ P_{R\theta}(\lambda) = \frac{2\cos{\theta} \pm \sqrt{-4\lambda^2\sin^2{\theta}}}{2}\]
\[ P_{R_\theta}(\lambda) \text{ has roots } \frac{2\cos{\theta} \pm \sqrt{-4\sin^2{\theta}}}{2} = \cos{\theta} \pm \sqrt{-\sin^2{\theta}}\]
\[ R_\theta \text{ has real eigenvalues} \iff \sin{\theta} = 0 \]
\end{document}
\subsubsection*{Example}
\begin{align*}
A & = \begin{pmatrix} 4 & 0 & 1 \\ -2 & 1 & 0 \\ -2 & 0 & 1 \end{pmatrix} \\
P_A(\lambda) & = \det{A - \lambda I_3} = \det{\begin{pmatrix} 4 - \lambda & 0 & 1 \\ -2 & 1 - \lambda & 0 \\ -2 & 0 & 1 - \lambda \end{pmatrix}} = (1 - \lambda)\det{\begin{pmatrix}4 - \lambda & 1 \\ -2 & 1 - \lambda \end{pmatrix}} \\
& = (1 - \lambda)\paren{(4 - \lambda)(1 - \lambda) + 2} = (1 - \lambda)(\lambda^2 - 5\lambda + 6) \\
& = (1 - \lambda)(2 - \lambda)(3 - \lambda) \\
\lambda & = 1, 2, 3 \\
A\vec{x} & = \lambda\vec{x} \quad \forall \text{ eigenvectors } \vec{x} \\
(A - \lambda I_n)\vec{x} & = 0 \\
& \begin{pmatrix} 3 & 0 & 1 \\ -2 & 0 & 0 \\ -2 & 0 & 0 \end{pmatrix}\vec{x} = \begin{pmatrix} 0 \\ 0 \\ 0 \end{pmatrix} \\
& \text{ eigenvectors with eigenvalue 1 are } s\vec{e}_2~~\forall~s \in \R \setminus \{0\} \\
(A - 2I_n)\vec{x} & = 0 \\
& \begin{pmatrix} 2 & 0 & 1 \\ -2 & -1 & 0 \\ -2 & 0 & -1 \end{pmatrix}\vec{x} = \begin{pmatrix} 0 \\ 0 \\ 0\end{pmatrix} \\
& \systeme{
2x_1 + x_3 = 0,
-2x_1 - x_2 = 0,
-2x_1 -x_3 = 0
}: s\begin{pmatrix} 1 \\ -2 \\ -2 \end{pmatrix} \\ \\
B & = \begin{pmatrix} 5 & 3 & 3 \\ -3 & -1 & -3 \\ -1 & -3 & -1 \end{pmatrix}
\end{align*}
Repeated roots of the characteristic polynomial lead to eigenspaces with multiple free variables (dimension greater than one).
\[ \forall \text{ matrices } A.~A \text{ is invertible } \iff 0 \text{ is not an eigenvalue }\]
\begin{align*}
\text{If $0$ is an eigenvalue, } P_A(0) = 0 \therefore \det{\paren{A - 0I_n}} = \det{A} = 0 \text{, so $A$ is not invertible}
\end{align*}
\[ P_A(\lambda) = \det{\paren{(-I_n)(\lambda I_n - A)}} = \det{-I_n}\det{\lambda I_n - A}\]
\[ = (-1)^n \lambda^n + c_{n-1}\lambda^{n-1} + \dots + c_0 \]
\begin{description}
\item[Trace] The sum of the diagonal of a matrix
\[ c_{n - 1} = (-1)^{n+1}\operatorname{tr}A \]
\item[Cayley-Hamilton Theorem:] \( P_A(A) = 0_n \)
\end{description}
\subsubsection*{Example}
\begin{align*}
A & = \begin{pmatrix} 1 & 4 \\ 3 & 2 \end{pmatrix} \\
P_A(\lambda) & = (1 - \lambda)(2 - \lambda) - 12 \\
& = 2 - 3\lambda + \lambda^2 - 12 \\
& = \lambda^2 - 3\lambda - 10 \\
P_A(5) & = P_A(-2) = 0 \\
P_A(A) & = 0 \\
A^2 & = 3A + 10I_2 \\
A^3 & = (3A + 10I_2)A \\
A^{n + 2} & = 3A^{n+1} + 10A^n
\end{align*}
\subsection*{Diagonalization}
\begin{description}
\item[Similarity of matrices] $A$ and $B$ are \emph{similar} iff there exists an invertible matrix $P$ such that $B = P^{-1}AP$
\[ T_\alpha \text{ is similar to } T_0 (P = R_{-\alpha}) \]
\end{description}
For similar $A$, $B$:
\begin{itemize}
\item $\det{A} = \det{B}$
\item $P_A(\lambda) = P_B(\lambda)$
\[\det{\paren{A - \lambda I_n}} = \det{\paren{PBP^{-1} - \lambda PP^{-1}}} = \det{\paren{P(B - \lambda I_n)P^{-1}}}\]
\[ (A - \lambda I_n) \text{ and } (B - \lambda I_n) \text{ are similar }\]
\item eigenvalues are the same
\item trace is the same
\end{itemize}
\begin{description}
\item[Diagonalizable Matrix] a square matrix that is similar to a diagonal matrix
\[ P^{-1}AP = D \]
$P$ diagonalizes $A$ \quad
(P is not necessarily unique)
\end{description}
An $n \times n$ matrix $A$ is diagonalizable iff there exists an invertible matrix $P = (\vec{x}_1, \vec{x}_2, ...)$, where $\vec{x}_i$ is an eigenvector of $A$
\[ P^{-1}AP = \begin{pmatrix} \lambda_1 & 0 & \cdots & 0 \\ 0 & \lambda_2 & \ddots & \vdots \\ \vdots & \ddots & \ddots & 0 \\ 0 & \cdots & 0 & \lambda_n \end{pmatrix} \]
i.e. it's diagonal, with the $ii^\text{th}$ element equal to the eigenvalue corresponding to $\vec{x}_i$
\subsubsection*{Example}
\begin{align*}
A & = \begin{pmatrix} 4 & 0 & 1 \\ -2 & 1 & 0 \\ -2 & 0 & 1 \end{pmatrix} \\
\vec{x}_1 & = \begin{pmatrix} 0 \\ s \\ 0 \end{pmatrix} & (\lambda_1 = 1) \\ \vec{x}_2 & = \begin{pmatrix} t \\ -2t \\ -2t \end{pmatrix} & (\lambda_2 = 2) \\ \vec{x}_3 & = \begin{pmatrix} u \\ -u \\ -u \end{pmatrix} & (\lambda_3 = 3) \\
P & = \begin{pmatrix}
0 & 1 & 1 \\
1 & -2 & -1 \\
0 & -2 & -1
\end{pmatrix} \\
P^{-1} & = \begin{pmatrix}
0 & 1 & -1 \\ -1 & 0 & -1 \\ 2 & 0 & 1
\end{pmatrix} \\
P^{-1}AP & = \begin{pmatrix}
1 & 0 & 0 \\ 0 & 2 & 0 \\ 0 & 0 & 3
\end{pmatrix}
\end{align*}
\subsection*{Linear Independence}
For any set of vectors $V = \{ v_i \}_{i < n}$ in $\R^n$, the $v_i$ are linearly independent if
\[ \sum k_i v_i = 0 \implies \forall i.~ k_i = 0 \quad (k_i \in \R) \]
This implies that no vector in $V$ can be written as a sum of scalar multiples of the others. \\
A square matrix is invertible iff its columns are linearly independent in $\R^n$
\\ An $n \times n$ matrix $A$ is diagonalizable iff it admits $n$ linearly independent eigenvectors \\
For $\vec{x}_i$ eigenvectors of $A$ corresponding to distinct eigenvalues $\lambda_i$, $\{ \vec{x}_i \}$ is linearly independent. \\
Note: the converse is \emph{not} true: \( \exists \text{ matrices } A \) with repeated eigenvalues and linearly independent eigenvectors.
\[ \forall k \in \N, A = PDP^{-1}.~A^k = (PDP^{-1})^k = PD^kP^{-1} \]
Exponentiation of a diagonal matrix is elementwise
\end{document}