% chapter_3.tex — solutions to Chapter 3 of Axler, *Linear Algebra Done Right*.
% (Removed non-LaTeX residue from a web scrape — page chrome and a line-number
% gutter — that preceded the actual source and would prevent compilation.)
\documentclass{extarticle}
\sloppy
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% PACKAGES %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[10pt]{extsizes}
\usepackage{amsfonts}
\usepackage{amsthm}
\usepackage{amssymb}
\usepackage[shortlabels]{enumitem}
\usepackage{microtype}
\usepackage{amsmath}
\usepackage{mathtools}
\usepackage{commath}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% PROBLEM ENVIRONMENT %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{tcolorbox}
\tcbuselibrary{theorems, breakable, skins}
\newtcbtheorem{prob}% environment name
{Problem}% Title text
{enhanced, % tcolorbox styles
attach boxed title to top left={xshift = 4mm, yshift=-2mm},
colback=blue!5, colframe=black, colbacktitle=blue!3, coltitle=black,
boxed title style={size=small,colframe=gray},
fonttitle=\bfseries,
separator sign none
}%
{}
\newenvironment{problem}[1]{\begin{prob*}{#1}{}}{\end{prob*}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% THEOREMS/LEMMAS/ETC. %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newtheorem{thm}{Theorem}
\newtheorem*{thm-non}{Theorem}
\newtheorem{lemma}[thm]{Lemma}
\newtheorem{corollary}[thm]{Corollary}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% MY COMMANDS %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newcommand{\Z}{\mathbb{Z}}
\newcommand{\R}{\mathbb{R}}
\newcommand{\C}{\mathbb{C}}
\newcommand{\F}{\mathbb{F}}
\newcommand{\bigO}{\mathcal{O}}
\DeclareMathOperator{\Real}{Re}
\newcommand{\poly}{\mathcal{P}}
\newcommand{\mat}{\mathcal{M}}
\DeclareMathOperator{\Span}{span}
\newcommand{\Hom}{\mathcal{L}}
\DeclareMathOperator{\Null}{null}
\DeclareMathOperator{\Range}{range}
\newcommand{\defeq}{\vcentcolon=}
\newcommand{\restr}[1]{|_{#1}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SECTION NUMBERING %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\renewcommand\thesection{\Alph{section}:}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% DOCUMENT START %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title{\vspace{-2em}Chapter 3: Linear Maps}
\author{\emph{Linear Algebra Done Right}, by Sheldon Axler}
\date{}
\begin{document}
\maketitle
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SECTION A
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{The Vector Space of Linear Maps}
% Problem 1
\begin{problem}{1}
Suppose $b,c\in\R$. Define $T:\R^3\to\R^2$ by
\begin{equation*}
T(x,y,z) = (2x - 4y + 3z + b, 6x + cxyz).
\end{equation*}
Show that $T$ is linear if and only if $b = c = 0$.
\end{problem}
\begin{proof}
$(\Leftarrow)$ Suppose $b = c = 0$. Then
\begin{equation*}
T(x,y,z) = (2x - 4y + 3z, 6x).
\end{equation*}
Let $(x_1,y_1,z_1), (x_2,y_2,z_2)\in\R^3$. Then
\begin{align*}
T((x_1,y_1,z_1) + (x_2,y_2,z_2)) &= T(x_1 + x_2, y_1 + y_2, z_1 + z_2)\\
&= (2(x_1 + x_2) - 4(y_1 + y_2) + 3(z_1 + z_2), 6(x_1 + x_2))\\
&= (2x_1 + 2x_2 - 4y_1 - 4y_2 + 3z_1 + 3z_2, 6x_1 + 6x_2)\\
&= (2x_1 - 4y_1 + 3z_1, 6x_1) + (2x_2 - 4y_2 + 3z_2, 6x_2)\\
&= T(x_1, y_1, z_1) + T(x_2, y_2,z_2).
\end{align*}
Now, for $\lambda\in\F$ and $(x,y,z)\in\R^3$, we have
\begin{align*}
T(\lambda(x,y,z)) &= T(\lambda x, \lambda y, \lambda z)\\
&= (2(\lambda x) - 4(\lambda y) + 3(\lambda z), 6(\lambda x))\\
&= (\lambda(2x - 4y + 3z), \lambda(6x))\\
&= \lambda(2x - 4y + 3z, 6x)\\
&= \lambda T(x, y, z),
\end{align*}
and thus $T$ is a linear map.
\par ($\Rightarrow$) Suppose $T$ is a linear map. Then
\begin{equation}\tag{$\dagger$}
T(x_1 + x_2,y_1 + y_2, z_1 + z_2) = T(x_1, y_1, z_1) + T(x_2, y_2, z_2)
\end{equation}
for all $(x_1,y_1,z_1),(x_2, y_2, z_2)\in\R^3$. In particular, by applying the definition of $T$ and comparing first coordinates of both sides of $(\dagger)$, we have
\begin{align*}
2(x_1 + x_2) - 4(y_1+y_2) + 3(z_1&+ z_2) + b =\\ &(2x_1 - 4y_1 + 3z_1 + b) + (2x_2 - 4y_2 +3z_2 + b),
\end{align*}
and after simplifying, we have $b = 2b$, and hence $b= 0$. Now by applying the definition of $T$ and comparing second coordinates of both sides of $(\dagger)$, we have
\begin{align*}
6(x_1 + x_2) + c(x_1+x_2)(y_1+y_2)(z_1+z_2) &= 6x_1 + c(x_1y_1z_1) + 6x_2 + c(x_2y_2z_2)\\
&= 6(x_1 + x_2) + c(x_1y_1z_1 + x_2y_2z_2),
\end{align*}
which implies
\begin{equation*}
c(x_1+x_2)(y_1+y_2)(z_1+z_2) = c(x_1y_1z_1 + x_2y_2z_2).
\end{equation*}
Now suppose $c\neq 0$. Then choosing $(x_1,y_1,z_1) = (x_2,y_2,z_2) = (1,1,1)$, the equation above implies $8 = 2$, a contradiction. Thus $c=0$, completing the proof.
\end{proof}
% Problem 3
\begin{problem}{3}
Suppose $T\in \Hom(\F^n,\F^m)$. Show that there exist scalars $A_{j,k}\in\F$ for $j=1,\dots,m$ and $k=1,\dots,n$ such that
\begin{equation*}
T(x_1,\dots,x_n) = (A_{1,1}x_1 + \dots + A_{1,n}x_n,\dots, A_{m,1}x_1 + \dots + A_{m,n}x_n)
\end{equation*}
for every $(x_1,\dots,x_n)\in\F^n$.
\end{problem}
\begin{proof}
Given $x\in\F^n$, we may write
\begin{equation*}
x = x_1 e_1 + \dots + x_n e_n,
\end{equation*}
where $e_1,\dots,e_n$ is the standard basis of $\F^n$. Since $T$ is linear, we have
\begin{equation*}
Tx = T(x_1 e_1 + \dots +x_n e_n) = x_1 Te_1 + \dots + x_n Te_n.
\end{equation*}
Now for each $Te_k\in\F^m$, where $k=1,\dots, n$, there exist $A_{1,k},\dots, A_{m,k}\in\F$ such that
\begin{align*}
Te_k &= A_{1,k}e_1 + \dots + A_{m,k}e_m\\
&= \left(A_{1,k}, \dots, A_{m,k}\right)
\end{align*}
and thus
\begin{equation*}
x_kTe_k = \left(A_{1,k}x_k, \dots, A_{m,k}x_k\right).
\end{equation*}
Therefore, we have
\begin{align*}
Tx &= \sum_{k = 1}^n \left(A_{1,k}x_k, \dots, A_{m,k}x_k\right)\\
&= \left(\sum_{k = 1}^nA_{1,k}x_k, \dots, \sum_{k = 1}^nA_{m,k}x_k \right),
\end{align*}
and thus there exist scalars $A_{j,k}\in\F$ for $j=1,\dots,m$ and $k=1,\dots,n$ of the desired form.
\end{proof}
% Problem 5
\begin{problem}{5}
Prove that $\Hom(V,W)$ is a vector space.
\end{problem}
\begin{proof}
We check each property in turn.\\
\textbf{Commutative:} Given $S,T\in\Hom(V,W)$ and $v\in V$, we have
\begin{equation*}
(T+S)(v) = Tv + Sv = Sv + Tv = (S + T)(v)
\end{equation*}
and so addition is commutative.\\
\textbf{Associative:} Given $R,S,T\in\Hom(V,W)$ and $v\in V$, we have
\begin{align*}
((R + S) + T)(v) = (R+S)(v) + Tv &= Rv + Sv + Tv\\
&= Rv + (S + T)(v) = (R + (S + T))(v)
\end{align*}
and so addition is associative. And given $a,b\in\F$, we have
\begin{equation*}
((ab)T)(v) = (ab)(Tv) = a(b(Tv)) = (a(bT))(v)
\end{equation*}
and so scalar multiplication is associative as well.\\
\textbf{Additive identity:} Let $0\in\Hom(V,W)$ denote the zero map, let $T\in\Hom(V,W)$, and let $v\in V$. Then
\begin{equation*}
(T + 0)(v) = Tv + 0v = Tv + 0 = Tv
\end{equation*}
and so the zero map is the additive identity.\\
\textbf{Additive inverse:} Let $T\in\Hom(V,W)$ and define $(-T)\in\Hom(V,W)$ by $(-T)v = -Tv$. Then
\begin{equation*}
(T + (-T))(v) = Tv + (-T)v = Tv - Tv = 0,
\end{equation*}
and so $(-T)$ is the additive inverse for each $T\in\Hom(V,W)$.\\
\textbf{Multiplicative identity:} Let $T\in\Hom(V,W)$. Then
\begin{equation*}
(1T)(v) = 1(Tv) = Tv
\end{equation*}
and so the multiplicative identity of $\F$ is the multiplicative identity of scalar multiplication.\\
\textbf{Distributive properties:} Let $S,T\in\Hom(V,W)$, $a,b\in\F$, and $v\in V$. Then
\begin{align*}
(a(S + T))(v) = a((S + T)(v)) = a(Sv + Tv) &= a(Sv) + a(Tv)\\
& = (aS)(v) + (aT)(v)
\end{align*}
and
\begin{align*}
((a + b)T)(v) = (a+b)(Tv) = a(Tv) + b(Tv) = (aT)(v) + (bT)(v)
\end{align*}
and so the distributive properties hold.
\par Since all properties of a vector space hold, we see $\Hom(V,W)$ is in fact a vector space, as desired.
\end{proof}
% Problem 7
\begin{problem}{7}
Show that every linear map from a $1$-dimensional vector space to itself is multiplication by some scalar. More precisely, prove that if $\dim V= 1$ and $T\in\Hom(V,V)$, then there exists $\lambda\in\F$ such that $Tv = \lambda v$ for all $v\in V$.
\end{problem}
\begin{proof}
Since $\dim V = 1$, a basis of $V$ consists of a single vector. So let $w\in V$ be such a basis. Then there exists $\alpha\in\F$ such that $v = \alpha w$ and $\lambda\in \F$ such that $Tw = \lambda w$. It follows
\begin{equation*}
Tv = T(\alpha w) = \alpha Tw = \alpha\lambda w = \lambda(\alpha w) = \lambda v,
\end{equation*}
as desired.
\end{proof}
% Problem 9
\begin{problem}{9}
Give an example of a function $\varphi:\C\to\C$ such that
\begin{equation*}
\varphi(w + z) = \varphi(w) + \varphi(z)
\end{equation*}
for all $w,z\in\C$ but $\varphi$ is not linear. (Here $\C$ is thought of as a complex vector space.)
\end{problem}
\begin{proof}
Define
\begin{align*}
\varphi:\C&\to\C\\
x + yi &\mapsto x - yi.
\end{align*}
Then for $x_1 + y_1i, x_2 + y_2i\in\C$, it follows
\begin{align*}
\varphi((x_1 + y_1i) + (x_2 + y_2i)) &= \varphi((x_1 + x_2) + (y_1 + y_2)i)\\
&= (x_1 + x_2) - (y_1 + y_2)i\\
&= (x_1 - y_1i) + (x_2 - y_2i)\\
&= \varphi(x_1 + y_1i) + \varphi(x_2 + y_2i)
\end{align*}
and so $\varphi$ satisfies the additivity requirement. However, we have
\begin{align*}
\varphi(i\cdot i) = \varphi(-1) = -1
\end{align*}
and
\begin{equation*}
i\cdot\varphi(i) = i(-i) = 1
\end{equation*}
and hence $\varphi$ fails the homogeneity requirement of a linear map.
\end{proof}
% Problem 11
\begin{problem}{11}
Suppose $V$ is finite-dimensional. Prove that every linear map on a subspace of $V$ can be extended to a linear map on $V$. In other words, show that if $U$ is a subspace of $V$ and $S\in\Hom(U,W)$, then there exists $T\in\Hom(V,W)$ such that $Tu = Su$ for all $u\in U$.
\end{problem}
\begin{proof}
Suppose $U$ is a subspace of $V$ and $S\in\Hom(U,W)$. Let $v_1,\dots, v_m$ be a basis of $U$ and let $v_1,\dots, v_m, v_{m+1},\dots, v_n$ be an extension of this basis to $V$. For any $z\in V$, there exist $a_1,\dots, a_n\in\F$ such that $z =\sum_{k=1}^na_kv_k$, and so we define
\begin{align*}
T: V &\to W\\
\sum_{k = 1}^n a_kv_k &\mapsto \sum_{k = 1}^m a_kSv_k + \sum_{k = m+1}^n a_kv_k.
\end{align*}
Since every $v\in V$ has a unique representation as a linear combination of elements of our basis, the map is well-defined. We first show $T$ is a linear map. So suppose $z_1,z_2\in V$. Then there exist $a_1,\dots a_n\in \F$ and $b_1,\dots, b_n\in\F$ such that
\begin{equation*}
z_1 = a_1v_1 + \dots + a_nv_n ~~~\text{and}~~~ z_2 = b_1v_1 + \dots + b_nv_n.
\end{equation*}
It follows
\begin{align*}
T(z_1 + z_2) &= T\left(\sum_{k=1}^na_kv_k + \sum_{k=1}^nb_kv_k\right)\\
&= T\left(\sum_{k=1}^n(a_k + b_k)v_k\right)\\
&= \sum_{k=1}^m(a_k + b_k)Sv_k + \sum_{k=m+1}^n(a_k + b_k)v_k\\
&= \left(\sum_{k=1}^ma_kSv_k + \sum_{k=m+1}^na_kv_k\right) + \left(\sum_{k=1}^mb_kSv_k + \sum_{k=m+1}^nb_kv_k\right)\\
&= T\left(\sum_{k=1}^na_kv_k\right) + T\left(\sum_{k=1}^nb_kv_k \right)\\
&= Tz_1 + Tz_2
\end{align*}
and so $T$ is additive. To see that $T$ is homogeneous, let $\lambda \in\F$ and $z\in V$, so that we may write $z =\sum_{k=1}^na_kv_k$ for some $a_1,\dots,a_n\in\F$. We have
\begin{align*}
T(\lambda z) &= T\left(\lambda \sum_{k=1}^na_kv_k\right)\\
&= T\left(\sum_{k=1}^n(\lambda a_k)v_k\right)\\
&= S\left(\sum_{k=1}^m(\lambda a_k)v_k\right) + \sum_{k=m+1}^n(\lambda a_k)v_k\\
&= \lambda S\left(\sum_{k=1}^ma_kv_k\right) + \lambda\sum_{k=m+1}^n a_kv_k\\
&= \lambda\left(S\left(\sum_{k=1}^ma_kv_k\right) + \sum_{k=m+1}^n a_kv_k\right)\\
&= \lambda T\left(\sum_{k=1}^na_kv_k\right)\\
&= \lambda Tz
\end{align*}
and so $T$ is homogeneous as well hence $T\in\Hom(V,W)$. Lastly, to see that $T\mid_U = S$, let $u\in U$. Then there exist $a_1,\dots, a_m\in\F$ such that $u=\sum_{k=1}^ma_kv_k$, and hence
\begin{align*}
Tu &= T\left( \sum_{k=1}^ma_kv_k\right)\\
&= S\left(\sum_{k=1}^ma_kv_k\right)\\
&= Su,
\end{align*}
and so indeed $T$ agrees with $S$ on $U$, completing the proof.
\end{proof}
% Problem 13
\begin{problem}{13}
Suppose $v_1,\dots, v_m$ is a linearly dependent list of vectors in $V$. Suppose also that $W\neq\{0\}$. Prove that there exist $w_1,\dots, w_m\in W$ such that no $T\in\Hom(V,W)$ satisfies $Tv_k=w_k$ for each $k = 1,\dots, m$.
\end{problem}
\begin{proof}
Since $v_1,\dots,v_m$ is linearly dependent, one of them may be written as a linear combination of the others. Without loss of generality, suppose this is $v_m$. Then there exist $a_1,\dots,a_{m-1}\in\F$ such that
\begin{align*}
v_m = a_1v_1 + \dots + a_{m-1}v_{m-1}.
\end{align*}
Since $W\neq\{0\}$, there exists some nonzero $z\in W$. Define $w_1,\dots,w_m\in W$ by
\begin{equation*}
w_k = \begin{cases}z &\text{ if }k = m\\ 0 &\text{ otherwise.} \end{cases}
\end{equation*}
Now suppose there exists $T\in\Hom(V,W)$ such that $Tv_k = w_k$ for $k = 1,\dots, m$. It follows
\begin{align*}
T(0) &= T(v_m - a_1v_1 - \dots - a_{m-1}v_{m-1})\\
&= Tv_m - a_1Tv_1 - \dots - a_{m-1}Tv_{m-1}\\
&= z.
\end{align*}
But $z\neq0$, and thus $T(0)\neq 0$, a contradiction, since linear maps take $0$ to $0$. Therefore, no such linear map can exist.
\end{proof}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SECTION B
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Null Spaces and Ranges}
% Problem 1
\begin{problem}{1}
Give an example of a linear map $T$ such that $\dim\Null T = 3$ and $\dim\Range T = 2$.
\end{problem}
\begin{proof}
Define the map
\begin{align*}
T:\R^5 &\to \R^5\\
(x_1, x_2, x_3,x_4, x_5) &\mapsto (0, 0, 0, x_4, x_5).
\end{align*}
First we show $T$ is a linear map. Suppose $x,y\in\R^5$. Then
\begin{align*}
T(x + y) &= T((x_1, x_2, x_3,x_4, x_5) + (y_1, y_2, y_3,y_4, y_5) )\\
&= T(x_1+y_1, x_2+y_2, x_3+y_3,x_4+y_4, x_5+y_5)\\
&= (0, 0, 0, x_4 + y_4, x_5 + y_5)\\
&= (0, 0, 0, x_4, x_5) + (0, 0, 0, y_4, y_5)\\
&= T(x) + T(y).
\end{align*}
Next let $\lambda\in\R$. Then
\begin{align*}
T(\lambda x) &= T(\lambda x_1, \lambda x_2, \lambda x_3, \lambda x_4, \lambda x_5) \\
&= (0, 0, 0, \lambda x_4, \lambda x_5)\\
&= \lambda(0,0,0,x_4, x_5)\\
&= \lambda T(x),
\end{align*}
and so $T$ is in fact a linear map. Now notice that
\begin{equation*}
\Null T = \{(x_1,x_2,x_3,0,0)\in\R^5\mid x_1, x_2, x_3\in\R\}.
\end{equation*}
This space clearly has as a basis $e_1,e_2,e_3\in\R^5$ and hence has dimension $3$. Now, by the Fundamental Theorem of Linear Maps, we have
\begin{equation*}
\dim\R^5 = 3 + \dim\Range T
\end{equation*}
and hence $\dim\Range T = 2$, as desired.
\end{proof}
% Problem 3
\begin{problem}{3}
Suppose $v_1,\dots,v_m$ is a list of vectors in $V$. Define $T\in\Hom(\F^m, V)$ by
\begin{equation*}
T(z_1,\dots, z_m) = z_1v_1 + \dots + z_m v_m.
\end{equation*}
\begin{enumerate}[(a)]
\item What property of $T$ corresponds to $v_1,\dots, v_m$ spanning $V$?
\item What property of $T$ corresponds to $v_1,\dots, v_m$ being linearly independent?
\end{enumerate}
\end{problem}
\begin{proof}
\begin{enumerate}[(a)]
\item We claim surjectivity of $T$ corresponds to $v_1,\dots, v_m$ spanning $V$. To see this, suppose $T$ is surjective, and let $w\in V$. Then there exists $z\in \F^m$ such that $Tz = w$. This yields
\begin{equation*}
z_1v_1 + \dots + z_m v_m = w,
\end{equation*}
and hence every $w\in V$ can be expressed as a linear combination of $v_1,\dots, v_m$. That is, $\Span(v_1,\dots,v_m)= V$.
\item We claim injectivity of $T$ corresponds to $v_1,\dots, v_m$ being linearly independent. To see this, suppose $T$ is injective, and let $a_1,\dots, a_m\in\F$ such that
\begin{equation*}
a_1v_1 + \dots + a_mv_m = 0.
\end{equation*}
Then
\begin{equation*}
T(a) = T(a_1,\dots, a_m) = a_1v_1 + \dots + a_mv_m = 0
\end{equation*}
which is true iff $a = 0$ since $T$ is injective. That is, $a_1=\dots = a_m=0$ and hence $v_1,\dots, v_m$ is linearly independent. \qedhere
\end{enumerate}
\end{proof}
% Problem 5
\begin{problem}{5}
Give an example of a linear map $T:\R^4\to\R^4$ such that
\begin{equation*}
\Range T = \Null T.
\end{equation*}
\end{problem}
\begin{proof}
Define
\begin{align*}
T: \R^4 &\to \R^4\\
(x_1,x_2,x_3,x_4) &\mapsto (x_3, x_4, 0, 0).
\end{align*}
Clearly $T$ is a linear map, and we have
\begin{equation*}
\Null T =\{(x_1, x_2, x_3, x_4)\in\R^4\mid x_3 = x_4 = 0\} = \R^2\times \{0\}^2
\end{equation*}
and
\begin{equation*}
\Range T = \{(x, y, 0, 0)\mid x, y\in\R\} = \R^2 \times \{0\}^2.
\end{equation*}
Hence $\Range T = \Null T$, as desired.
\end{proof}
% Problem 7
\begin{problem}{7}
Suppose $V$ and $W$ are finite-dimensional with $2\leq \dim V\leq \dim W$. Show that $\{T\in \Hom(V,W)\mid T \text{ is not injective}\}$ is not a subspace of $\Hom(V,W)$.
\end{problem}
\begin{proof}
Let $Z = \{T\in \Hom(V,W)\mid T \text{ is not injective}\}$, let $v_1,\dots, v_m$ be a basis of $V$, where $m\geq 2$, and let $w_1,\dots, w_n$ be a basis of $W$, where $n\geq m$. We define $T\in \Hom(V,W)$ by its behavior on the basis
\begin{equation*}
Tv_k \defeq \begin{cases}0 &\text{ if }k = 1\\ w_2 &\text{ if }k = 2 \\ \frac{1}{2}w_{k} &\text{ otherwise}\end{cases}
\end{equation*}
so that clearly $T$ is not injective since $Tv_1 = 0 = T(0)$, and hence $T\in Z$. Similarly, define $S\in\Hom(V,W)$ by its behavior on the basis
\begin{equation*}
Sv_k \defeq \begin{cases}w_1 &\text{ if }k = 1\\0 &\text{ if }k = 2\\ \frac{1}{2}w_{k} &\text{ otherwise}\end{cases}
\end{equation*}
and note that $S$ is not injective either since $Sv_2 = 0 = S(0)$, and hence $S\in Z$. However, notice
\begin{align*}
(S+ T)(v_k) = w_k \text{ for } k = 1,\dots, m
\end{align*}
and hence $\Null(S+T)=\{0\}$ since it takes the basis $v_1,\dots,v_m$ of $V$ to the linearly independent list $w_1,\dots,w_m$ in $W$, so that $S+T$ is in fact injective. Therefore $S+T\not\in Z$, and $Z$ is not closed under addition. Thus $Z$ is not a subspace of $\Hom(V,W)$.
\end{proof}
% Problem 9
\begin{problem}{9}
Suppose $T\in\Hom(V,W)$ is injective and $v_1,\dots, v_n$ is linearly independent in $V$. Prove that $Tv_1,\dots, Tv_n$ is linearly independent in $W$.
\end{problem}
\begin{proof}
Suppose $a_1,\dots, a_n\in\F$ are such that
\begin{equation*}
a_1Tv_1 + \dots + a_nTv_n = 0.
\end{equation*}
Since $T$ is a linear map, it follows
\begin{align*}
T(a_1v_1 + \dots + a_nv_n) = 0.
\end{align*}
But since $\Null T = \{0\}$ (by virtue of $T$ being a linear map), this implies $a_1v_1 + \dots + a_nv_n= 0$. And since $v_1,\dots, v_n$ are linearly independent, we must have $a_1=\dots = a_n = 0$, which in turn implies $Tv_1,\dots, Tv_n$ is indeed linearly independent in $W$.
\end{proof}
% Problem 11
\begin{problem}{11}
Suppose $S_1,\dots,S_n$ are injective linear maps such that $S_1S_2\dots S_n$ makes sense. Prove that $S_1S_2\dots S_n$ is injective.
\end{problem}
\begin{proof}
For $n\in\Z_{\geq 2}$, let $P(n)$ be the statement: $S_1,\dots, S_n$ are injective linear maps such that $S_1S_2\dots S_n$ makes sense, and the product $S_1S_2\dots S_n$ is injective. We induct on $n$.\\
\textbf{Base case:} Suppose $n =2$, and assume $S_2\in\Hom(V_0,V_1)$ and $S_1\in\Hom(V_1,V_2)$, so that the product $S_1S_2$ is defined, and assume that both $S_1$ and $S_2$ are injective. Suppose $v_1,v_2\in V_0$ are such that $v_1\neq v_2$, and let $w_1 = S_2v_1$ and $w_2 = S_2v_2$. Since $S_2$ is injective, $w_1\neq w_2$. And since $S_1$ is injective, this in turn implies that $S_1(w_1)\neq S_1(w_2)$. In other words, $S_1(S_2(v_1)) \neq S_1(S_2(v_2))$, so that $S_1S_2$ is injective as well, and hence $P(2)$ is true.\\
\textbf{Inductive step:} Suppose $P(k)$ is true for some $k\in\Z_{\geq 2}$, and consider the product $(S_1S_2\dots S_{k})S_{k+1}$. The term in parentheses is injective by the inductive hypothesis, and the product of this term with $S_{k+1}$ is injective by our base case. Thus $P(k+1)$ is true.
\par By the principle of mathematical induction, the statement $P(n)$ is true for all $n\in\Z_{\geq 2}$, as was to be shown.
\end{proof}
% Problem 13
\begin{problem}{13}
Suppose $T$ is a linear map from $\F^4$ to $\F^2$ such that
\begin{equation*}
\Null T = \{(x_1,x_2,x_3,x_4)\in\F^4\mid x_1 = 5x_2\text{ and }x_3 = 7x_4\}.
\end{equation*}
Prove that $T$ is surjective.
\end{problem}
\begin{proof}
We claim the list
\begin{equation*}
(5, 1, 0, 0), (0, 0, 7, 1)
\end{equation*}
is a basis of $\Null T$. This implies
\begin{align*}
\dim \Range T &= \dim\F^4 - \dim\Null T\\
&= 4 - 2\\
&= 2,
\end{align*}
and hence $T$ is surjective (since the only $2$-dimensional subspace of $\F^2$ is the space itself). So let's prove our claim that this list is a basis.
\par Clearly the list is linearly independent. To see that it spans $\Null T$, suppose $x=(x_1,x_2,x_3,x_4)\in \Null T$, so that $x_1 = 5x_2$ and $x_3 = 7x_4$. We may write
\begin{align*}
\begin{pmatrix}x_1\\x_2\\x_3\\x_4 \end{pmatrix} = \begin{pmatrix}5x_2\\x_2\\7x_4\\x_4 \end{pmatrix} = x_2\begin{pmatrix}5\\1\\0\\0\end{pmatrix} + x_4\begin{pmatrix}0\\0\\7\\1\end{pmatrix},
\end{align*}
and indeed $x$ is in the span of our list, so that our list is in fact a basis, completing the proof.
\end{proof}
% Problem 15
\begin{problem}{15}
Prove that there does not exist a linear map from $\F^5$ to $\F^2$ whose null space equals
\begin{equation*}
\{(x_1,x_2,x_3,x_4,x_5)\in\F^5\mid x_1=3x_2 \text{ and }x_3 = x_4 = x_5\}.
\end{equation*}
\end{problem}
\begin{proof}
Suppose such a $T\in\Hom(\F^5, \F^2)$ did exist. We claim
\begin{equation*}
(3, 1, 0, 0, 0), (0, 0, 1, 1, 1)
\end{equation*}
is a basis of $\Null T$. This implies
\begin{align*}
\dim \Range T &= \dim\F^5 - \dim \Null T\\
&= 5 - 2\\
&= 3,
\end{align*}
which is absurd, since the codomain of $T$ has dimension $2$. Hence such a $T$ cannot exist. So, let's prove our claim that this list is a basis.
\par Clearly $(3, 1, 0, 0, 0), (0, 0, 1, 1, 1)$ is linearly independent. To see that it spans $\Null T$, suppose $x=(x_1,\dots,x_5)\in\Null T$, so that $x_1=3x_2$ and $x_3 = x_4 = x_5$. We may write
\begin{align*}
\begin{pmatrix}x_1\\x_2\\x_3\\x_4\\x_5 \end{pmatrix} = \begin{pmatrix}3x_2\\x_2\\x_3\\x_3\\x_3 \end{pmatrix} = x_2\begin{pmatrix}3\\1\\0\\0\\0\end{pmatrix} + x_3\begin{pmatrix}0\\0\\1\\1\\1\end{pmatrix},
\end{align*}
and indeed $x$ is in the span of our list, so that our list is in fact a basis, completing the proof.
\end{proof}
% Problem 17
\begin{problem}{17}
Suppose $V$ and $W$ are both finite-dimensional. Prove that there exists an injective linear map from $V$ to $W$ if and only if $\dim V\leq \dim W$.
\end{problem}
\begin{proof}
$(\Rightarrow)$ Suppose $T\in\Hom(V,W)$ is injective. If $\dim V > \dim W$, Theorem 3.23 tells us that no map from $V$ to $W$ would be injective, a contradiction, and so we must have $\dim V\leq \dim W$.
\par $(\Leftarrow)$ Suppose $\dim V\leq \dim W$. Let $v_1,\dots, v_n$ be a basis of $V$ and let $w_1,\dots, w_m$ be a basis of $W$, so that $n\leq m$. Define $T\in\Hom(V,W)$ by its behavior on the basis, taking $Tv_k = w_k$ for $k = 1,\dots, n$. If $v = a_1v_1 + \dots + a_nv_n$ satisfies $Tv = 0$, then $a_1w_1 + \dots + a_nw_n = 0$, and since $w_1,\dots, w_n$ is linearly independent, we have $a_1 = \dots = a_n = 0$, i.e.\ $v = 0$. Hence $\Null T = \{0\}$, and $T$ is both linear and injective.
\end{proof}
% Problem 19
\begin{problem}{19}
Suppose $V$ and $W$ are finite-dimensional and that $U$ is a subspace of $V$. Prove that there exists $T\in\Hom(V,W)$ such that $\Null T = U$ if and only if $\dim U\geq \dim V - \dim W$.
\end{problem}
\begin{proof}
$(\Leftarrow)$ Suppose $\dim U \geq \dim V- \dim W$. Since $U$ is a subspace of $V$, there exists a subspace $U'$ of $V$ such that
\begin{equation*}
V = U \oplus U'.
\end{equation*}
Let $u_1,\dots u_m$ be a basis for $U$, let $u'_1,\dots, u'_n$ be a basis for $U'$, and let $w_1,\dots, w_p$ be a basis for $W$. By hypothesis, we have
\begin{equation*}
m \geq (m + n) - p,
\end{equation*}
which implies $p\geq n$. Thus we may define a linear map $T\in\Hom(V, W)$ by its values on the basis of $V=U\oplus U'$ by taking $Tu_k = 0$ for $k = 1,\dots m$ and $Tu'_j = w_j$ for $j = 1,\dots, n$ (since $p\geq n$, there is a $w_j$ for each $u'_j$). The map is linear by Theorem 3.5, and its null space is $U$ by construction.
\par $(\Rightarrow)$ Suppose $U$ is a subspace of $V$, $T\in\Hom(V,W)$, and $\Null T = U$. Then, since $\Range T$ is a subspace of $W$, we have $\dim\Range T \leq \dim W$. Combining this inequality with the Fundamental Theorem of Linear Maps yields
\begin{align*}
\dim \Null T &= \dim V - \dim\Range T\\
&\geq \dim V - \dim W.
\end{align*}
Since $\dim \Null T = \dim U$, we have the desired inequality.
\end{proof}
% Problem 21
\begin{problem}{21}
Suppose $V$ is finite-dimensional and $T\in\Hom(V,W)$. Prove that $T$ is surjective if and only if there exists $S\in\Hom(W,V)$ such that $TS$ is the identity map on $W$.
\end{problem}
\begin{proof}
$(\Rightarrow)$ Suppose $T\in\Hom(V,W)$ is surjective, so that $W$ is necessarily finite-dimensional as well. Let $v_1,\dots, v_m$ be a basis of $V$ and let $n=\dim W$, where $m\geq n $ by surjectivity of $T$. Note that
\begin{equation*}
Tv_1,\dots, Tv_m
\end{equation*}
span $W$. Thus we may reduce this list to a basis by removing some elements (possibly none, if $n = m$). Suppose this reduced list were $Tv_{i_1},\dots, Tv_{i_n}$ for some $i_1,\dots, i_n\in\{1,\dots, m\}$. We define $S\in\Hom(W,V)$ by its behavior on this basis
\begin{align*}
S(Tv_{i_k}) \defeq v_{i_k} \text{ for }k = 1,\dots, n.
\end{align*}
Suppose $w \in W$. Then there exist $a_1,\dots, a_n\in\F$ such that
\begin{equation*}
w = a_1 Tv_{i_1} + \dots + a_nTv_{i_n}
\end{equation*}
and thus
\begin{align*}
TS(w) &= TS\left(a_1 Tv_{i_1} + \dots + a_nTv_{i_n}\right)\\
&= T\left(S\left( a_1 Tv_{i_1} + \dots + a_nTv_{i_n}\right)\right)\\
&= T\left( a_1 S(Tv_{i_1}) + \dots + a_nS(Tv_{i_n})\right)\\
&= T(a_1 v_{i_1} + \dots + a_nv_{i_n})\\
&= a_1 Tv_{i_1} + \dots + a_nTv_{i_n}\\
&= w,
\end{align*}
and so $TS$ is the identity map on $W$.
\par $(\Leftarrow)$ Suppose there exists $S\in\Hom(W,V)$ such that $TS\in\Hom(W,W)$ is the identity map, and suppose by way of contradiction that $T$ is not surjective, so that $\dim \Range TS < \dim W$. By the Fundamental Theorem of Linear Maps, this implies
\begin{align*}
\dim W &= \dim\Null TS + \dim \Range TS\\
&< \dim\Null TS + \dim W
\end{align*}
and hence $\dim\Null TS > 0$, a contradiction, since the identity map can only have trivial null space. Thus $T$ is surjective, as desired.
\end{proof}
% Problem 23
\begin{problem}{23}
Suppose $U$ and $V$ are finite-dimensional vector spaces and $S\in\Hom(V,W)$ and $T\in\Hom(U,V)$. Prove that
\begin{equation*}
\dim\Range ST \leq \min\{\dim\Range S, \dim\Range T\}.
\end{equation*}
\end{problem}
\begin{proof}
We will show that both $\dim\Range ST \leq \dim \Range S$ and $\dim\Range ST \leq \dim \Range T$, since this implies the desired inequality.
\par We first show that $\dim\Range ST \leq \dim \Range S$. Suppose $w \in \Range ST$. Then there exists $u\in U$ such that $ST(u) = w$. But this implies that $w\in\Range S$ as well, since $Tu\in S^{-1}(w)$. Thus $\Range ST \subseteq \Range S$, which implies $\dim \Range ST \leq \dim\Range S$.
\par We now show that $\dim\Range ST \leq \dim \Range T$. Note that if $v\in\Null T$, so that $Tv =0$, then $ST(v) = 0$ (since linear maps take zero to zero). Thus we have $\Null T\subseteq \Null ST$, which implies $\dim \Null T\leq \dim \Null ST$. Combining this inequality with the Fundamental Theorem of Linear Maps applied to $T$ yields
\begin{align}
\dim U \leq \dim \Null ST + \dim \Range T.
\end{align}
Similarly, we have
\begin{equation}
\dim U = \dim\Null ST + \dim\Range ST.
\end{equation}
Combining $(1)$ and $(2)$ yields
\begin{align*}
\dim\Null ST + \dim\Range ST \leq \dim\Null ST + \dim\Range T
\end{align*}
and hence $\dim\Range ST \leq \dim\Range T$, completing the proof.
\end{proof}
% Problem 25
\begin{problem}{25}
Suppose $V$ is finite-dimensional and $T_1,T_2\in\Hom(V,W)$. Prove that $\Range T_1\subseteq \Range T_2$ if and only if there exists $S\in\Hom(V,V)$ such that $T_1= T_2S$.
\end{problem}
\begin{proof}
$(\Leftarrow)$ Suppose there exists $S\in\Hom(V,V)$ such that $T_1=T_2S$, and let $w\in\Range T_1$. Then there exists $v\in V$ such that $T_1v = w$, and hence $T_2S(v) = w$. But then $w \in\Range T_2$ as well, and hence $\Range T_1\subseteq \Range T_2$.
\par $(\Rightarrow)$ Suppose $\Range T_1\subseteq \Range T_2$, and let $v_1,\dots, v_n$ be a basis of $V$. Let $w_k = T_1v_k$ for $k=1,\dots, n$. Then there exist $u_1,\dots,u_n\in V$ such that $T_2u_k = w_k$ for $k=1,\dots,n$ (since $w_k\in\Range T_1$ implies $w_k\in\Range T_2$). Define $S\in\Hom(V,V)$ by its behavior on the basis
\begin{equation*}
Sv_k \defeq u_k \text{ for }k= 1,\dots, n.
\end{equation*}
It follows that $T_2S(v_k) = T_2u_k = w_k = T_1v_k$. Since $T_2S$ and $T_1$ are equal on the basis, they are equal as linear maps, as was to be shown.
\end{proof}
% Problem 27
\begin{problem}{27}
Suppose $p\in\poly(\R)$. Prove that there exists a polynomial $q\in\poly(\R)$ such that $5q'' + 3q' = p$.
\end{problem}
\begin{proof}
If $p = 0$, then $q = 0$ works; so assume $p\neq 0$ and let $n = \deg p$. Consider the linear map
\begin{align*}
D: \poly_{n+1}(\R) &\to \poly_n(\R)\\
q &\mapsto 5q'' + 3q'.
\end{align*}
If we can show $D$ is surjective, we're done, since this implies that there exists some $q\in\poly_{n+1}(\R)$ such that $Dq = 5q'' + 3q' = p$. To that end, suppose $r \in\Null D$, so that $5r'' + 3r' = 0$. If $r'$ were nonzero, then since $\deg r'' < \deg r'$, the polynomial $5r'' + 3r'$ would have degree $\deg r' \geq 0$ and in particular would be nonzero, a contradiction. Hence $r' = 0$, which holds if and only if $r$ is constant. Conversely, every constant polynomial clearly lies in $\Null D$. Thus $\Null D$ is the subspace of constant polynomials, any single nonzero constant polynomial is a basis of it, and so $\dim\Null D=1$. By the Fundamental Theorem of Linear Maps, we have
\begin{align*}
\dim\Range D =\dim\poly_{n + 1}(\R) - \dim\Null D,
\end{align*}
and hence
\begin{align*}
\dim\Range D = (n + 2) - 1 = n + 1.
\end{align*}
Since the only subspace of $\poly_n(\R)$ with dimension $n + 1$ is the space itself, $D$ is surjective, as desired.
\end{proof}
% Problem 29
\begin{problem}{29}
Suppose $\varphi\in\Hom(V,\F)$. Suppose $u\in V$ is not in $\Null\varphi$. Prove that
\begin{equation*}
V = \Null\varphi \oplus \{au\mid a\in\F\}.
\end{equation*}
\end{problem}
\begin{proof}
First note that since $u\not\in\Null\varphi$, we have $\varphi(u)\neq 0$. (The following argument does not require $V$ to be finite-dimensional.) Let $v\in V$ be arbitrary and define
\begin{equation*}
\lambda \defeq \frac{\varphi(v)}{\varphi(u)}.
\end{equation*}
We may then write
\begin{equation*}
v = (v - \lambda u) + \lambda u,
\end{equation*}
and since
\begin{equation*}
\varphi(v - \lambda u) = \varphi(v) - \lambda\varphi(u) = \varphi(v) - \varphi(v) = 0,
\end{equation*}
we have $v - \lambda u\in\Null\varphi$, while $\lambda u\in\{au\mid a\in\F\}$. Thus $V = \Null\varphi + \{au\mid a\in\F\}$.
\par To see the sum is direct, suppose $w\in\Null\varphi\cap\{au\mid a\in\F\}$, so that $w = au$ for some $a\in\F$ and $\varphi(w) = a\varphi(u) = 0$. Since $\varphi(u)\neq 0$, it follows $a = 0$, and hence $w = 0$. Therefore $\Null\varphi\cap\{au\mid a\in\F\} = \{0\}$, the sum is direct, and we conclude $V = \Null\varphi\oplus\{au\mid a\in\F\}$, as desired.
\end{proof}
% Problem 31
\begin{problem}{31}
Give an example of two linear maps $T_1$ and $T_2$ from $\R^5$ to $\R^2$ that have the same null space but are such that $T_1$ is not a scalar multiple of $T_2$.
\end{problem}
\begin{proof}
Let $e_1,\dots, e_5$ be the standard basis of $\R^5$. We define $T_1,T_2\in\Hom(\R^5,\R^2)$ by their behavior on the basis (using the standard basis for $\R^2$ as well)
\begin{align*}
T_1e_1 &\defeq e_2\\
T_1e_2 &\defeq e_1\\
T_1e_k &\defeq 0 \text{ for }k = 3, 4, 5
\end{align*}
and
\begin{align*}
T_2e_1 &\defeq e_1\\
T_2e_2 &\defeq e_2\\
T_2e_k &\defeq 0 \text{ for }k = 3, 4, 5.
\end{align*}
Clearly $\Null T_1 = \Null T_2 = \Span(e_3, e_4, e_5)$. We claim $T_1$ is not a scalar multiple of $T_2$. To see this, suppose not. Then there exists $\alpha\in\R$ such that $T_1 = \alpha T_2$. In particular, this implies $T_1e_1 = \alpha T_2e_1$. But this is absurd, since $T_1e_1 = e_2$ and $T_2e_1 = e_1$, and of course $e_1,e_2$ is linearly independent. Thus no such $\alpha$ can exist, and $T_1,T_2$ are as desired.
\end{proof}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SECTION C
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Matrices}
% Problem 1
\begin{problem}{1}
Suppose $V$ and $W$ are finite-dimensional and $T\in\Hom(V,W)$. Show that with respect to each choice of bases of $V$ and $W$, the matrix of $T$ has at least $\dim\Range T$ nonzero entries.
\end{problem}
\begin{proof}
Let $v_1,\dots, v_n$ be a basis of $V$, let $w_1,\dots, w_m$ be a basis of $W$, and let $r = \dim\Range T$. Since every element of $\Range T$ has the form $Tv$ for some $v\in V$, the list $Tv_1,\dots, Tv_n$ spans $\Range T$. Hence at least $r$ of the vectors $Tv_1,\dots, Tv_n$ must be nonzero: discarding the zero vectors from a spanning list leaves a spanning list, so if fewer than $r$ were nonzero, $\Range T$ would be spanned by fewer than $r$ vectors, contradicting $\dim\Range T = r$. Now suppose $Tv_k\neq 0$, where $k\in\{1,\dots, n\}$. Then there exist $a_1,\dots, a_m\in\F$, not all zero, such that
\begin{equation*}
Tv_k = a_1w_1 + \dots + a_mw_m.
\end{equation*}
The coefficients form column $k$ of $\mat(T)$, and there are at least $r$ such indices $k$. Hence at least $r$ columns of $\mat(T)$ each contain at least one nonzero entry, so $\mat(T)$ has at least $r = \dim\Range T$ nonzero entries, as was to be shown.
\end{proof}
% Problem 3
\begin{problem}{3}
Suppose $V$ and $W$ are finite-dimensional and $T\in\Hom(V,W)$. Prove that there exist a basis of $V$ and a basis of $W$ such that with respect to these bases, all entries of $\mat(T)$ are $0$ except the entries in row $j$, column $j$, equal $1$ for $1\leq j\leq \dim\Range T$.
\end{problem}
\begin{proof}
Let $R$ be a subspace of $V$ such that
\begin{equation*}
V = R\oplus \Null T,
\end{equation*}
let $r_1,\dots, r_m$ be a basis of $R$ (where $m =\dim\Range T$), and let $v_1,\dots, v_n$ be a basis of $\Null T$ (where $n = \dim\Null T$). Then $r_1,\dots, r_m, v_1,\dots,v_n$ is a basis of $V$. It follows that $Tr_1,\dots, Tr_m$ is a basis of $\Range T$, and hence there is an extension of this list to a basis of $W$. Suppose $Tr_1,\dots, Tr_m, w_1,\dots, w_p$ is such an extension (where $p = \dim W - m$). Then, for $j = 1,\dots m$, we have
\begin{equation*}
Tr_j = \left(\sum_{i = 1}^m\delta_{i,j}\cdot Tr_i\right) + \left(\sum_{k = 1}^p 0\cdot w_k\right),
\end{equation*}
where $\delta_{i,j}$ is the Kronecker delta function. Thus, column $j$ of $\mat(T)$ has an entry of $1$ in row $j$ and $0$'s elsewhere, where $j$ ranges over $1$ to $m = \dim\Range T$. Since $Tv_1 = \dots = Tv_n = 0$, the remaining columns of $\mat(T)$ are all zero. Thus $\mat(T)$ has the desired form.
\end{proof}
% Problem 5
\begin{problem}{5}
Suppose $w_1,\dots, w_n$ is a basis of $W$ and $V$ is finite-dimensional. Suppose $T\in\Hom(V,W)$. Prove that there exists a basis $v_1,\dots, v_m$ of $V$ such that all the entries in the first row of $\mat(T)$ (with respect to the bases $v_1,\dots,v_m$ and $w_1,\dots,w_n$) are $0$ except for possibly a $1$ in the first row, first column.
\end{problem}
\begin{proof}
First note that if $\Range T\subseteq \Span(w_2,\dots,w_n)$, the first row of $\mat(T)$ will be all zeros regardless of choice of basis for $V$.
\par So suppose $\Range T\not\subseteq \Span(w_2,\dots, w_n)$ and let $u_1\in V$ be such that $Tu_1\not\in\Span(w_2,\dots, w_n)$. There exist $a_1,\dots, a_n\in\F$ such that
\begin{equation*}
Tu_1=a_1w_1+\dots + a_nw_n,
\end{equation*}
and notice $a_1\neq 0$ since $Tu_1\not\in\Span(w_2,\dots,w_n)$. Hence we may define
\begin{equation*}
z_1 \defeq \frac{1}{a_1}u_1.
\end{equation*}
It follows
\begin{equation}\label{eq:1}
Tz_1 = w_1 + \frac{a_2}{a_1}w_2 + \dots + \frac{a_n}{a_1}w_n.
\end{equation}
Now extend $z_1$ to a basis $z_1,\dots, z_m$ of $V$. Then for $k=2,\dots, m$, there exist $A_{1,k},\dots, A_{n,k}\in\F$ such that
\begin{equation*}
Tz_k = A_{1,k}w_1 + \dots + A_{n,k}w_n,
\end{equation*}
and notice
\begin{align}
T(z_k - A_{1,k}z_1) &= Tz_k - A_{1,k}Tz_1 \nonumber \\
&= \left(A_{1,k}w_1 + \dots + A_{n,k}w_n\right) - A_{1,k}\left( w_1 + \frac{a_2}{a_1}w_2 + \dots + \frac{a_n}{a_1}w_n\right) \nonumber\\
&= \left(A_{2,k} - A_{1,k}\frac{a_2}{a_1}\right)w_2 + \dots + \left(A_{n,k} - A_{1,k}\frac{a_n}{a_1}\right)w_n. \label{eq:2}
\end{align}
Now we define a new list in $V$ by
\begin{align*}
v_k \defeq \begin{cases}z_1 &\text{ if }k = 1\\ z_k - A_{1,k}z_1 &\text{ otherwise}\end{cases}
\end{align*}
for $k = 1,\dots, m$. We claim $v_1,\dots,v_m$ is a basis. To see this, it suffices to prove the list is linearly independent, since its length equals $\dim V$. So suppose $b_1,\dots, b_m\in\F$ are such that
\begin{equation*}
b_1v_1 + \dots + b_mv_m = 0.
\end{equation*}
By definition of the $v_k$, it follows
\begin{equation*}
b_1z_1 + b_2(z_2 - A_{1,2}z_1) + \dots + b_m(z_m - A_{1,m}z_1) = 0.
\end{equation*}
Expanding and regrouping, the LHS equals $\bigl(b_1 - b_2A_{1,2} - \dots - b_mA_{1,m}\bigr)z_1 + b_2z_2 + \dots + b_mz_m$. Since $z_1,\dots, z_m$ is a basis of $V$ and hence linearly independent, we must have $b_2=\dots=b_m=0$, and then $b_1 = 0$ as well. Thus indeed $v_1,\dots, v_m$ are linearly independent, as claimed.
\par Finally, notice \eqref{eq:1} tells us the first column of $\mat(T, v_k, w_k)$ is all $0$'s except a $1$ in the first entry, and \eqref{eq:2} tells us the remaining columns have a $0$ in the first entry. Thus $\mat(T, v_k, w_k)$ has the desired form, completing the proof.
\end{proof}
% Problem 7
\begin{problem}{7}
Suppose $S,T\in\Hom(V,W)$. Prove that $\mat(S+T) = \mat(S) + \mat(T)$.
\end{problem}
\begin{proof}
Let $v_1,\dots, v_m$ be a basis of $V$ and let $w_1,\dots, w_n$ be a basis of $W$. Also, let $A = \mat(S)$ and $B =\mat(T)$ be the matrices of these linear transformations with respect to these bases. It follows
\begin{align*}
(S + T)v_k &= Sv_k + Tv_k\\
&= \left(A_{1, k}w_1 + \dots + A_{n,k}w_n\right) + \left(B_{1, k}w_1 + \dots + B_{n,k}w_n\right) \\
&= (A_{1, k} + B_{1,k})w_1 + \dots + (A_{n,k} + B_{n,k})w_n.
\end{align*}
Hence $\mat(S + T)_{j,k}=A_{j,k} + B_{j,k}$, and indeed we have $\mat(S+T) = \mat(S) + \mat(T)$, as desired.
\end{proof}
% Problem 9
\begin{problem}{9}
Suppose $A$ is an $m$-by-$n$ matrix and $c = \begin{pmatrix}c_1\\ \vdots \\ c_n\end{pmatrix}$ is an $n$-by-$1$ matrix. Prove that
\begin{equation*}
Ac = c_1A_{\cdot, 1} + \dots + c_nA_{\cdot, n}.
\end{equation*}
\end{problem}
\begin{proof}
By definition, it follows
\begin{align*}
Ac &= \begin{pmatrix}
&A_{1,1} &A_{1,2} &\dots &A_{1, n} \\
&A_{2,1} &A_{2,2} &\dots &A_{2, n} \\
&\vdots &\vdots & \ddots &\vdots \\
&A_{m,1} &A_{m, 2} &\dots &A_{m,n}\end{pmatrix}
\begin{pmatrix}c_1\\ c_2\\ \vdots \\ c_n\end{pmatrix}\\
&= \begin{pmatrix} A_{1,1}c_1 + A_{1,2}c_2 + \dots + A_{1, n}c_n \\
A_{2,1}c_1 + A_{2,2}c_2 + \dots + A_{2, n}c_n \\
\vdots \\
A_{m,1}c_1 + A_{m,2}c_2 + \dots + A_{m, n}c_n \end{pmatrix}\\
&= c_1\begin{pmatrix} A_{1,1}\\ A_{2,1}\\ \vdots \\ A_{m,1}\end{pmatrix} + c_2\begin{pmatrix} A_{1,2}\\ A_{2,2}\\ \vdots \\ A_{m,2}\end{pmatrix} + \dots + c_n\begin{pmatrix} A_{1,n}\\ A_{2,n}\\ \vdots \\ A_{m,n}\end{pmatrix}\\
&= c_1A_{\cdot, 1} + \dots + c_nA_{\cdot, n},
\end{align*}
as desired.
\end{proof}
% Problem 11
\begin{problem}{11}
Suppose $a = (a_1, \dots, a_n)$ is a $1$-by-$n$ matrix and $C$ is an $n$-by-$p$ matrix. Prove that
\begin{equation*}
aC = a_1C_{1,\cdot} + \dots + a_nC_{n,\cdot}.
\end{equation*}
\end{problem}
\begin{proof}
By definition, it follows
\begin{align*}
aC &= (a_1, \dots, a_n)\begin{pmatrix}
&C_{1,1} &C_{1,2} &\dots &C_{1, p} \\
&C_{2,1} &C_{2,2} &\dots &C_{2, p} \\
&\vdots &\vdots & \ddots &\vdots \\
&C_{n,1} &C_{n, 2} &\dots &C_{n,p}\end{pmatrix}\\
&= \left(\sum_{k = 1}^n a_k C_{k, 1}, \sum_{k = 1}^n a_k C_{k, 2}, \dots, \sum_{k = 1}^n a_k C_{k, p} \right)\\
&= \sum_{k = 1}^n\left(a_kC_{k,1}, \dots, a_kC_{k, p}\right)\\
&= \sum_{k = 1}^na_k\left(C_{k,1}, \dots, C_{k, p}\right)\\
&= \sum_{k = 1}^na_kC_{k, \cdot},
\end{align*}
as desired.
\end{proof}
% Problem 13
\begin{problem}{13}
Prove that the distributive property holds for matrix addition and matrix multiplication. In other words, suppose $A,B,C,D,E,$ and $F$ are matrices whose sizes are such that $A(B+C)$ and $(D+E)F$ make sense. Prove that $AB+AC$ and $DF + EF$ both make sense and that $A(B+ C) = AB + AC$ and $(D + E)F = DF + EF$.
\end{problem}
\begin{proof}
First note that if $A(B+C)$ makes sense, then the number of columns of $A$ must equal the number of rows of $B+C$. But the sum of two matrices is only defined if their dimensions are equal, and hence the number of rows of both $B$ and $C$ must equal the number of columns of $A$. Thus $AB + AC$ makes sense. So suppose $A\in\F^{m,n}$ and $B,C\in\F^{n,p}$. It follows
\begin{align*}
\left(A(B+C)\right)_{j,k} &= \sum_{r = 1}^n A_{j, r}(B + C)_{r,k}\\
&= \sum_{r = 1}^n A_{j, r}(B_{r,k} + C_{r,k})\\
&= \sum_{r = 1}^n\left(A_{j,r}B_{r,k} + A_{j,r}C_{r,k}\right)\\
&= \sum_{r = 1}^nA_{j,r}B_{r,k} + \sum_{r = 1}^nA_{j,r}C_{r,k} \\
&= (AB)_{j,k} + (AC)_{j,k},
\end{align*}
proving the first distributive property.
\par Now note that if $(D + E)F$ makes sense, then the number of columns of $D + E$ must equal the number of rows of $F$. Hence the number of columns of both $D$ and $E$ must equal the number of rows of $F$, and thus $DF+ EF$ makes sense as well. So suppose $D,E \in\F^{m,n}$ and $F\in\F^{n,p}$. It follows
\begin{align*}
\left((D+E)F\right)_{j,k} &= \sum_{r = 1}^n (D+E)_{j,r} F_{r, k}\\
&= \sum_{r = 1}^n (D_{j,r} + E_{j,r})F_{r,k}\\
&= \sum_{r = 1}^n D_{j,r}F_{r,k} + E_{j,r}F_{r,k}\\
&= \sum_{r=1}^n D_{j,r}F_{r,k} + \sum_{r = 1}^nE_{j,r}F_{r,k}\\
&= (DF)_{j,k} + (EF)_{j,k},
\end{align*}
proving the second distributive property.
\end{proof}
% Problem 15
\begin{problem}{15}
Suppose $A$ is an $n$-by-$n$ matrix and $1\leq j,k\leq n$. Show that the entry in row $j$, column $k$, of $A^3$ (which is defined to mean $AAA$) is
\begin{equation*}
\sum_{p=1}^n\sum_{r = 1}^nA_{j,p}A_{p,r}A_{r,k}.
\end{equation*}
\end{problem}
\begin{proof}
For $1\leq p,k\leq n$, we have
\begin{align*}
(A^2)_{p,k} = \sum_{r=1}^nA_{p,r}A_{r,k}.
\end{align*}
Thus, for $1\leq j,k\leq n$, it follows
\begin{align*}
(A^3)_{j,k} &= \sum_{p = 1}^n A_{j,p}(A^2)_{p,k}\\
&= \sum_{p = 1}^nA_{j,p}\sum_{r = 1}^n A_{p, r}A_{r, k}\\
&= \sum_{p = 1}^n\sum_{r = 1}^nA_{j,p}A_{p,r}A_{r, k},
\end{align*}
as desired.
\end{proof}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SECTION D
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Invertibility and Isomorphic Vector Spaces}
% Problem 1
\begin{problem}{1}
Suppose $T\in\Hom(U,V)$ and $S\in\Hom(V,W)$ are both invertible linear maps. Prove that $ST\in\Hom(U,W)$ is invertible and that $(ST)^{-1}=T^{-1}S^{-1}$.
\end{problem}
\begin{proof}
For all $u \in U$, we have
\begin{align*}
(T^{-1}S^{-1}ST)(u) &= T^{-1}(S^{-1}(S(T(u))))\\
&= T^{-1}(I(T(u)))\\
&= T^{-1}(T(u))\\
&= u
\end{align*}
and hence $T^{-1}S^{-1}$ is a left inverse of $ST$. Similarly, for all $w \in W$, we have
\begin{align*}
(STT^{-1}S^{-1})(w) &= S(T(T^{-1}(S^{-1}(w))))\\
&= S(I(S^{-1}(w)))\\
&= S(S^{-1}(w))\\
&= w
\end{align*}
and hence $T^{-1}S^{-1}$ is a right inverse of $ST$. Therefore, $ST$ is invertible, as desired.
\end{proof}
% Problem 3
\begin{problem}{3}
Suppose $V$ is finite-dimensional, $U$ is a subspace of $V$, and $S\in\Hom(U,V)$. Prove there exists an invertible operator $T\in\Hom(V)$ such that $Tu=Su$ for every $u\in U$ if and only if $S$ is injective.
\end{problem}
\begin{proof}
$(\Leftarrow)$ Suppose $S$ is injective, and let $u_1,\dots, u_m$ be a basis of $U$. Since $S$ is injective, the list $Su_1,\dots, Su_m$ is linearly independent in $V$, and hence it may be extended to a basis $Su_1,\dots, Su_m, x_1,\dots, x_n$ of $V$. Also let $W$ be a subspace of $V$ such that $V = U\oplus W$, and let $w_1,\dots, w_n$ be a basis of $W$ (note that $\dim W = \dim V - \dim U = n$). Then $u_1,\dots, u_m,w_1,\dots,w_n$ is a basis of $V$, and we may define $T\in\Hom(V)$ by its behavior on this basis of $V$
\begin{align*}
Tu_k &\defeq Su_k\\
Tw_j &\defeq x_j
\end{align*}
for $k = 1,\dots, m$ and $j = 1,\dots, n$. Then $T$ maps the basis $u_1,\dots,u_m,w_1,\dots,w_n$ onto the basis $Su_1,\dots,Su_m,x_1,\dots,x_n$ of $V$, so $T$ is surjective. And since $V$ is finite-dimensional, this implies that $T$ is invertible. Finally, for $u\in U$, writing $u = a_1u_1 + \dots + a_mu_m$ gives $Tu = a_1Su_1 + \dots + a_mSu_m = Su$, as desired.
\par $(\Rightarrow)$ Suppose there exists an invertible operator $T\in\Hom(V)$ such that $Tu=Su$ for every $u\in U$. Since $T$ is invertible, it is also injective. And since $T$ is injective, so too is $S = T\mid_U$, completing the proof.
\end{proof}
% Problem 5
\begin{problem}{5}
Suppose $V$ is finite-dimensional and $T_1,T_2\in\Hom(V,W)$. Prove that $\Range T_1 = \Range T_2$ if and only if there exists an invertible operator $S\in\Hom(V)$ such that $T_1=T_2S$.
\end{problem}
\begin{proof}
$(\Rightarrow)$ Suppose $\Range T_1=\Range T_2 \defeq R$, and let $n = \dim R$. By the Fundamental Theorem of Linear Maps, $\dim\Null T_1 = \dim\Null T_2 = \dim V - n\defeq m$. (Note that equal ranges need not give equal null spaces, only null spaces of equal dimension.) Let $u_1,\dots,u_m$ be a basis of $\Null T_1$, let $u'_1,\dots,u'_m$ be a basis of $\Null T_2$, and let $Q_1$ and $Q_2$ be subspaces of $V$ such that
\begin{equation*}
V = \Null T_1 \oplus Q_1 \quad\text{and}\quad V = \Null T_2 \oplus Q_2,
\end{equation*}
and let $v_1,\dots,v_n$ be a basis of $Q_1$ (note $\dim Q_1 = \dim Q_2 = n$).
\par We claim $T_2\mid_{Q_2}\colon Q_2\to R$ is an isomorphism. It is injective: if $q\in Q_2$ and $T_2q = 0$, then $q\in\Null T_2\cap Q_2 = \{0\}$. It is surjective: any $w\in R$ may be written $w = T_2v$ for some $v\in V$, and decomposing $v = u + q$ with $u\in\Null T_2$ and $q\in Q_2$ gives $w = T_2q$. Thus for each $k = 1,\dots,n$ there exists a unique $q_k\in Q_2$ such that
\begin{equation*}
T_2q_k = T_1v_k.
\end{equation*}
The same argument shows $T_1\mid_{Q_1}\colon Q_1\to R$ is an isomorphism, and hence $T_1v_1,\dots,T_1v_n$ is a basis of $R$. Since the isomorphism $T_2\mid_{Q_2}$ carries $q_1,\dots,q_n$ to this basis, the list $q_1,\dots,q_n$ is a basis of $Q_2$.
\par We now define $S\in\Hom(V)$ by its behavior on the basis $u_1,\dots,u_m,v_1,\dots,v_n$ of $V$:
\begin{align*}
Su_k &= u'_k \text{ for } k = 1,\dots,m\\
Sv_j &= q_j \text{ for }j = 1,\dots,n.
\end{align*}
Since $S$ maps this basis onto the list $u'_1,\dots,u'_m,q_1,\dots,q_n$ --- which is a basis of $V$, being the concatenation of a basis of $\Null T_2$ and a basis of $Q_2$ where $V = \Null T_2\oplus Q_2$ --- the map $S$ is surjective, and hence invertible (as $V$ is finite-dimensional).
\par Finally, let $v\in V$, so that there exist $a_1,\dots, a_m,b_1,\dots,b_n\in\F$ such that
\begin{equation*}
v = a_1u_1 + \dots + a_mu_m + b_1v_1 + \dots + b_nv_n.
\end{equation*}
It follows
\begin{align*}
(T_2S)(v) &= T_2(a_1Su_1 + \dots + a_mSu_m + b_1Sv_1 + \dots + b_nSv_n)\\
&= T_2(a_1u'_1 + \dots + a_mu'_m + b_1q_1 + \dots + b_nq_n)\\
&= b_1T_2q_1 + \dots + b_nT_2q_n\\
&= b_1T_1v_1 + \dots + b_nT_1v_n\\
&= T_1(a_1u_1 + \dots + a_mu_m + b_1v_1 + \dots + b_nv_n)\\
&= T_1v,
\end{align*}
where the third and fifth equalities use $T_2u'_k = 0$ and $T_1u_k = 0$, respectively. Thus $T_1 = T_2S$ with $S$ invertible, completing the proof in this direction.
\par $(\Leftarrow)$ Suppose there exists an invertible operator $S\in\Hom(V)$ such that $T_1 = T_2S$. If $w\in\Range T_1$, then there exists $v\in V$ such that $T_1v = w$, and hence $(T_2S)(v) = T_2(S(v)) = w$, so that $w\in\Range T_2$ and we have $\Range T_1\subseteq \Range T_2$. Conversely, suppose $w'\in\Range T_2$, so that there exists $v'\in V$ such that $T_2v' = w'$. Then, since $T_2 = T_1S^{-1}$, we have $(T_1S^{-1})(v') = T_1(S^{-1}(v')) = w'$, so that $w'\in\Range T_1$. Thus $\Range T_2\subseteq\Range T_1$, and we have shown $\Range T_1 = \Range T_2$, as desired.
\end{proof}
% Problem 7
\begin{problem}{7}
Suppose $V$ and $W$ are finite-dimensional. Let $v\in V$. Let
\begin{equation*}
E = \{T\in \Hom(V,W)\mid Tv = 0\}.
\end{equation*}
\begin{enumerate}[(a)]
\item Show that $E$ is a subspace of $\Hom(V,W)$.
\item Suppose $v\neq 0$. What is $\dim E$?
\end{enumerate}
\end{problem}
\begin{proof}
\begin{enumerate}[(a)]
\item First note that the zero map is clearly an element of $E$, and hence $E$ contains the additive identity of $\Hom(V,W)$. Now suppose $T_1,T_2\in E$. Then
\begin{align*}
(T_1 + T_2)(v) = T_1v + T_2v = 0
\end{align*}
and hence $T_1 + T_2\in E$, so that $E$ is closed under addition. Finally, suppose $T\in E$ and $\lambda\in\F$. Then
\begin{equation*}
(\lambda T)(v) = \lambda Tv = \lambda 0 = 0,
\end{equation*}
and so $E$ is closed under scalar multiplication as well. Thus $E$ is indeed a subspace of $\Hom(V,W)$.
\item Suppose $v\neq 0$, and let $\dim V = m$ and $\dim W = n$. Extend $v$ to a basis $v, v_2, \dots, v_m$ of $V$, and endow $W$ with any basis. With respect to these bases, $\mat(T)$ is an $n$-by-$m$ matrix. Let $\mathcal{E}$ denote the subspace of $\F^{n,m}$ of matrices whose first column is all zero.
\par We claim $T\in E$ if and only if $\mat(T)\in \mathcal{E}$, so that $\mat: E\to \mathcal{E}$ is an isomorphism. Clearly if $T\in E$ (so that $Tv = 0$), then the first column $\mat(T)_{\cdot, 1}$, which records the coefficients of $Tv$, is all zero, and hence $\mat(T)\in\mathcal{E}$. Conversely, suppose $\mat(T) = A\in \mathcal{E}$. Since $v$ is the first basis vector of $V$, we have $\mat(v) = (1, 0, \dots, 0)^t$, and it follows
\begin{align*}
\mat(Tv) &= \mat(T)\mat(v)\\
&= \begin{pmatrix}
0 &A_{1, 2} &\dots &A_{1,m}\\
0 &A_{2,2} &\dots &A_{2,m}\\
\vdots &\vdots &\ddots &\vdots\\
0 &A_{n,2} &\dots &A_{n,m}
\end{pmatrix}
\begin{pmatrix}1\\ 0\\ \vdots\\ 0\end{pmatrix}\\
&= \begin{pmatrix}0\\ 0\\ \vdots\\ 0\end{pmatrix},
\end{align*}
and thus we must have $Tv = 0$ so that $T\in E$, proving our claim. So indeed $E\cong \mathcal{E}$.
\par Now note that $\mathcal{E}$ has as a basis the set of all matrices with a single $1$ in a column besides the first, and zeros everywhere else. There are $mn - n$ such matrices, and hence $\dim \mathcal{E} = mn - n$. Thus we have $\dim E = mn - n$ as well, as desired. \qedhere