-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathchap8.tex
227 lines (218 loc) · 9.99 KB
/
chap8.tex
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
%!TEX root = ./main.tex
\section{Operators on Complex Vector Spaces}
\setcounter{subsection}{2}
\subsection{Characteristic and Minimal Polynomial}
\begin{definition}
The number of times an eigenvalue $\lambda$ appears on the diagonal of the matrix is called the algebraic multiplicity of $\lambda$.
\end{definition}
\begin{example}
Suppose $V$ is a complex vector space and let $T \in \c L(V)$. Suppose $T$ has the following matrix representation
\[ \bml
2 & 1 & 0 \\
0 & 2 & 1 \\
0 & 0 & 2 \\
&&& 3 & 1 \\
&&& 0 & 3 \\
&&&&& 2 \bmr\]
We can see that $\lambda = 2$ has an algebraic multiplicity of $4$ and $\lambda = 3$ has an algebraic multiplicity of $2$.
\end{example}
\begin{definition}
Suppose $V$ is a complex vector space and $T \in \c L(V)$. Suppose $T$ has eigenvalues $\li \lambda n$ with algebraic multiplicities $\li dn$. Then the polynomial
\[ p_{\text{char}}(z) = \prod_{j} (z - \lambda_j)^{d_j} \] is the characteristic polynomial of $T$.
\end{definition}
\begin{theorem}[The Cayley-Hamilton Theorem]
Suppose $V$ is a complex vector space and $T \in \c L(V)$. Then $p(T) = 0$, where $p$ is the characteristic polynomial.
\end{theorem}
\begin{proof}
Trivial by Jordan Normal Form in section 8.d.
\end{proof}
\begin{definition}
A minimal polynomial for $T \in \c L(V)$ is a monic polynomial of smallest degree that annihilates $T$, i.e. $q(T) = 0$, $q$ has leading coefficient $1$, and $q$ is of smallest degree with this property.
\end{definition}
\begin{example}
Consider $T$ in example 8.2. For each eigenvalue, take the size of its largest block as the exponent of the corresponding term; this yields the minimal polynomial
\[ p_{\text{min}} (z) = (z - 2)^3(z - 3)^2\]
\end{example}
\begin{corollary}
Suppose $h(T) = 0$ for some polynomial $h \not\equiv 0$. Then $h(z) = p_{\text{min}} (z)q(z)$ for some polynomial $q$.
\end{corollary}
\begin{proof}
By the division algorithm for polynomials we have
\[ h(z) = p_{\text{min}}(z)q(z) + r(z)\]
where $\deg r < \deg p_{\text{min}}$. By the minimality of $p_{\text{min}}$, $r \equiv 0$.
\end{proof}
\subsection{Jordan Form}
\subsubsection*{Goal} To find the sparsest matrix representation for an arbitrary linear operator on a finite dimensional vector space over $\b C$.
\subsubsection{Observation}
``Rough'' decomposition 1
\[ \left[\begin{array}{ccc|ccc}
\ast & \cdots & \ast & 0 & \cdots & 0 \\
\vdots & \ddots & \vdots & \vdots & \ddots & \vdots \\
\ast & \cdots & \ast & 0 & \cdots & 0 \\
\hline
0 & \cdots & 0 & \ast & \cdots & \ast \\
\vdots & \ddots & \vdots & \vdots & \ddots & \vdots\\
0 & \cdots & 0 & \ast & \cdots & \ast \\
\end{array}\right] = \c M(T)\]
Notice that $T$ has two invariant subspaces whose direct sum is the whole space.
\begin{definition}
An operator is called nilpotent if some power of it equals to $0$.
\end{definition}
\begin{proposition}
For any $T \in \c L(V)$, there exist two subspaces, $V_s$ and $V_r$, such that $V = V_s \oplus V_r$, both $V_s$ and $V_r$ are $T$-invariant, $T\vert_{V_s}$ is nilpotent, and $T\vert_{V_r}$ is invertible.
\end{proposition}
\begin{proof}
Consider
\[ \lb \vec 0 \rb \subseteq \nul T \subseteq \nul T^2 \subseteq \cdots\]
Because $\dim V < \infty$, we must be able to find $q \in \b N$ such that $T^q$ and $T^{q+h}$ for any $h \in \b N$ have the same null space. In other words
\[ \exists q\in \b N : \nul T^q = \nul T^{q + h} \qquad \forall h \in \b N\]
Take $V_s := \nul T^q$ and $V_r := \range T^q$. Observe that $V_s$ and $V_r$ are $T$-invariant.
\noindent Next we want to check that $V_s \cap V_r = \lb \vec 0 \rb$. \\
Suppose $\vec v \in V_s \cap V_r$. Then $T^q \vec v = \vec 0$, and $T^q \vec w = \vec v$ for some $\vec w \in V$. So $T^{2q}\vec w = \vec 0$. Hence by the choice of $q$ we have $\vec w \in \nul T^q$, so \[T^q \vec w = \vec 0 = \vec v\]
So $\vec v = 0$, and $V_s \cap V_r = \lb \vec 0 \rb$. By Rank-Nullity, $V = V_s \oplus V_r$. \\
$T\vert_{V_s}$ is nilpotent since $\left(T\vert_{V_s}\right)^q$ is zero. \\
$T\vert_{V_r}$ is invertible since any $\vec w \in V_r$ with $T \vec w = \vec 0$ will also satisfy $T^q \vec w = \vec 0$, hence $\vec w = \vec 0$, and $T\vert_{V_r}$ being injective implies invertibility.
\end{proof}
\subsubsection*{Zoom in to the nilpotent part}
Say the whole space $V$ satisfies the condition $T^q = 0$, and without loss of generality we can take $q$ minimal with this property. This means there exists $\vec v_0 \in V$ such that $T^{q - 1} \vec v_0 \neq \vec 0$. Take
\[ V_0 := \spa \lb \vec v_0 , T\vec v_0, \ldots, T^{q-1} \vec v_0 \rb\]
Since $T^{q - 1} \vec v_0 \neq \vec 0$, there also exists $\vec w_0 \in V$ such that $\la T^{q-1}\vec v_0, \vec w_0\ra \neq 0$. Take the following matrix
\[ \left( \la T^{j-1}\vec v_0 , T^{*^{q-i}} \vec w_0 \ra\right)_{i,j = 1}^q = \left( \la T^{q + j - i - 1} \vec v_0, \vec w_0 \ra\right)_{i,j = 1}^q\]
Notice that this is a lower triangular matrix with nonzero diagonal entries.
\begin{corollary}
The list $\vec v_0 , T\vec v_0, \ldots, T^{q-1} \vec v_0$ is linearly independent and so is the list $\vec w_0 , T^*\vec w_0, \ldots, T^{*^{q-1}} \vec w_0 $
\end{corollary}
\begin{proof}
Take $V_1 := \left( \spa \left( \vec w_0 , T^*\vec w_0, \ldots, T^{*^{q-1}} \vec w_0 \right) \right)^{\perp}$. Notice that if $W$ is $T^*$-invariant, $W^\perp$ is $T$-invariant. Indeed, for any $\vec v \in W^\perp$ and any $\vec w \in W$, we have \[\la T \vec v, \vec w \ra =\la \vec v, T^* \vec w \ra = 0\]
Hence we have $V = V_0 \oplus V_1$, where $V_0, V_1$ are both $T$-invariant. To see that the sum is direct
Suppose \[\alpha_0 \vec v_0 + \alpha_1 T\vec v_0 + \cdots + \alpha_{q-1}T^{q-1}\vec v_0\]
is orthogonal to $\vec w_0 , T^*\vec w_0, \ldots, T^{*^{q-1}} \vec w_0 $. Then the matrix
\[ \left( \la T^{j-1} \vec v_0, T^{*^{q-i}} \vec w_0 \ra\right)_{i,j = 1}^q\]
being invertible guarantees that
\[ \alpha_0 = \alpha_1 = \cdots = \alpha_{q-1} = 0\]
\end{proof}
\subsubsection*{Fine decomposition}
We look at $\c M\left( T\vert_{V_0}\right)$ with respect to the basis $\vec v_0 , T\vec v_0, \ldots, T^{q-1}\vec v_0$. Hence we have
\[ \bml
0 & 0 & 0 & \cdots & 0 & 0\\
1 & 0 & 0 & \cdots & 0 & 0\\
0 & 1 & 0 & \cdots & 0 & 0\\
\vdots & \vdots & \vdots & \ddots & \vdots & \vdots\\
0 & 0 & 0 & \cdots & 1 & 0\bmr\]
\subsubsection*{Wrap-up}
Repeating the process finitely many times gives
\[V = V_1 \oplus V_2 \oplus \cdots \oplus V_n \]
This guarantees a block-diagonal form where each block looks like
\[ \bml
\lambda_j & 1 \\
& \lambda_j & 1 \\
&& \lambda_j & 1 \\
&&& \lambda_j & 1 \\
&&&& \ddots & \ddots \\
&&&&& \lambda_j & 1 \\
&&&&&& \lambda_j
\bmr\]
\begin{example}
Consider
\[ \c M(T) = \bml
3 & 1 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0\\
0 & 3 & 1 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0\\
0 & 0 & 3 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0\\
0 & 0 & 0 & 3 & 1 & 0 & 0 & 0 & 0 & 0 & 0 & 0\\
0 & 0 & 0 & 0 & 3 & 0 & 0 & 0 & 0 & 0 & 0 & 0\\
0 & 0 & 0 & 0 & 0 & 3 & 0 & 0 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 0 & -2 & 1 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 0 & 0 & -2 & 0 & 0 & 0 & 0 & \\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 & 1 & 0 & 0\\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 & 0 & 0\\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 \\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 & 0 \bmr\]
Where the empty entries are zero. We can see that $T$ has eigenvalues $3,-2,1,0$. \\
We can see that \begin{align*} \dim \nul (T - 3 \b I)^j &= 3,5,6,6,6, \ldots \\
\dim \nul (T + 2\b I)^j &= 1,2, \ldots \\
\dim \nul (T - 1 \b I)^j &= 1,2,2,\ldots \\
\dim \nul (T - 0 \b I)^j &= 1,2,2,2, \ldots \\
\text{for } j &= 1,2,3, \ldots
\end{align*}
\end{example}
\begin{example}
Suppose $V:= \c P_4(\b C)$. Let $D$ be the differentiation operator. Construct the Jordan Normal Form of $D$ and the Jordan Basis of $V$. \\
We can compute $\c M(D)$ with the standard basis
\[ \bml
0 & 1 & 0 & 0 & 0 \\
0 & 0 & 2 & 0 & 0 \\
0 & 0 & 0 & 3 & 0 \\
0 & 0 & 0 & 0 & 4 \\
0 & 0 & 0 & 0 & 0
\bmr\]
with some algebraic manipulation we then can see that $D$ has Jordan Normal Form
\[ \bml
0 & 1 & 0 & 0 & 0 \\
0 & 0 & 1 & 0 & 0 \\
0 & 0 & 0 & 1 & 0 \\
0 & 0 & 0 & 0 & 1 \\
0 & 0 & 0 & 0 & 0
\bmr\]
with basis $\displaystyle 1, x, \frac 12 x^2, \frac 13 x^3, \frac 14 x^4$.
\end{example}
\begin{example}
Suppose $V = \spa \left( e^{ikt} : |k| \leq 3 \right)$. Let $D$ be the differentiation operator. Construct the Jordan Normal Form of $D$ and the Jordan Basis of $V$. \\
We can see that $V = \spa (e^{-i3t},e^{-i2t},e^{-it},e^{0},e^{it},e^{i2t},e^{i3t})$. We can compute the matrix representation with respect to the standard basis
\[ \c M(D) = \bml
-3i \\
& -2i \\
&& -i \\
&&& 0 \\
&&&& i \\
&&&&& 2i \\
&&&&&& 3i
\bmr\]
Notice that if we use basis $\displaystyle V = \spa \left(\frac{1}{-3i} e^{-i3t}, \frac{1}{-2i}e^{-i2t},\frac{1}{-i}e^{-it}, e^{0}, \frac{1}{i}e^{it}, \frac{1}{2i}e^{i2t}, \frac{1}{3i}e^{i3t}\right)$ we can obtain the Jordan Normal Form
\[ \bml 1 \\
& 1 \\
&& 1 \\
&&& 0 \\
&&&& 1 \\
&&&&& 1 \\
&&&&&& 1 \bmr\]
\end{example}
\newpage
\subsubsection*{Reverse Engineering of the Jordan Normal Form}
\[ \begin{array}{|c|c|c|c|c|c|}
\hline
& \dim \nul (T - \lambda \b I) & \dim \nul (T - \lambda \b I)^2 & \dim \nul (T - \lambda \b I)^3 & \dim \nul (T - \lambda \b I)^4 & \dim \nul (T - \lambda \b I)^j,\ j \geq 5 \\
\hline
\hline
\lambda = i & 3 & 6 & 7 & 7 & 7 \\
\lambda = -i & 2 & 4 & 6 & 8 & 8 \\
\lambda = 1 & 1 & 2 & 3 & \not5 \ \ 4 & 5 \\
\hline
\end{array}\]
What is the Jordan Normal Form of $T$ based on this info? Assume all eigenvalues of $T$ are given above.
\begin{proof}[Solution] Notice that the difference between consecutive terms in each sequence denotes the number of $1$'s that get sent to $0$, hence we have the following matrix
\[ \bml
i & 1\\
& i & 1\\
&& i \\
&&& i & 1\\
&&&& i \\
&&&&& i & 1\\
&&&&&& i \\
&&&&&&& -i & 1 \\
&&&&&&&& -i & 1 \\
&&&&&&&&& -i & 1 \\
&&&&&&&&&& -i \\
&&&&&&&&&&& -i & 1 \\
&&&&&&&&&&&& -i & 1 \\
&&&&&&&&&&&&& -i & 1\\
&&&&&&&&&&&&&& -i \\
&&&&&&&&&&&&&&& 1 & 1 \\
&&&&&&&&&&&&&&&& 1 & 1 \\
&&&&&&&&&&&&&&&&& 1 & 1 \\
&&&&&&&&&&&&&&&&&& 1 & 1 \\
&&&&&&&&&&&&&&&&&&& 1 \bmr\]
\end{proof}
\vfill
\begin{center}
Last updated: \today
\end{center}