\documentclass{article} \usepackage[utf8]{inputenc} \usepackage{amsfonts} \usepackage{amsthm} \usepackage{enumerate} \usepackage{hyperref} \hypersetup{ colorlinks, citecolor=black, filecolor=black, linkcolor=black, urlcolor=black }
\theoremstyle{definition} \newtheorem{definition}{Def}[section] \newtheorem{theorem}[definition]{Thm}
% ndiv command, to allow a `not divides' symbol without needing extra packages
\newcommand{\ndiv}{\hspace{-4pt}\not|\hspace{2pt}} \newcommand{\crossover}{\hspace{-0pt}\not\hspace{-2pt}}
\title{Notes on ``A Book of Abstract Algebra'', Charles C. Pinter} \author{arnaucube} \date{February 2022}
\begin{document}
\maketitle
\begin{abstract} These notes on \emph{``A Book of Abstract Algebra''} by Charles C. Pinter are a \LaTeX{} version of handwritten notes taken while reading the book. They contain only some definitions and theorems (without proofs), so it is highly recommended to read the actual book rather than these notes. \\ \emph{This is an unfinished, work-in-progress document.} \end{abstract}
\tableofcontents % \newpage
% TODO maybe this 'Introduction' section should be removed
% \section{Introduction}
%
% $\mathbb{R} \supseteq \mathbb{Q} \supseteq \mathbb{Z} \supseteq \mathbb{N}$
% \begin{itemize}
% \item $\mathbb{R}$ (reals): $\{ \ldots, -3, 0, \sqrt{2}, 2, e, \pi, 4, \ldots \}$
% \item $\mathbb{Q}$ (rationals): $\{ \frac{a}{b} \mid a, b \in \mathbb{Z} \wedge b \neq 0 \}$
% \item $\mathbb{Z}$ (integers): $\{ \ldots, -3, -1, 0, 1, 3, \ldots \}$
% \item $\mathbb{N}$ (naturals): positive integers, $\{1, 2, 3, \ldots\}$
% \end{itemize}
\section{Groups}
\begin{definition}[Group] A set $G$ with an operation $*$ which satisfies the axioms: \begin{enumerate}[i.] \item $*$ is \emph{associative} \item \emph{(identity element)} there is an element $e \in G$ s.t. $a * e = a$ and $e * a = a$ $\forall a \in G$ \item \emph{(inverse)} $\forall a \in G$, there is an element $a^{-1} \in G$ s.t. $a*a^{-1} = e$ and $a^{-1} * a =e$ \end{enumerate} \end{definition}
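A standard first example (added here for illustration): the integers under addition, $(\mathbb{Z}, +)$, form a group: addition is associative, the identity is $e = 0$, and the inverse of $a$ is $-a$.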
\begin{definition}[Abelian Group] A group $G$ is said to be \emph{commutative} if $\forall a, b \in G$, $ab = ba$. A commutative group is also called \emph{Abelian}. \end{definition}
\begin{definition}[Order of an element] In a group $G$, the order of an element $a \in G$ is the least positive integer $n$ such that $a \cdot a \cdots a = a^n = e$. It is represented by $ord(a)$. \end{definition}
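For example, in $\mathbb{Z}_6$ with addition (where $a^n$ means $a$ added to itself $n$ times), $ord(2) = 3$, since $2 + 2 + 2 = 6 \equiv 0 \pmod 6$, while $2 \neq 0$ and $2 + 2 = 4 \neq 0$.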
\begin{definition}[Order of a group] Order of a group $G$, is the number of elements in $G$. It is represented by $|G|$. \end{definition}
\begin{definition}[Cyclic group] Let $G$ be a group, and $a \in G$. If $G$ consists of all the powers of $a$ and nothing else: $$G = \{a^n : n \in \mathbb{Z}\}$$ then, $G$ is called a \emph{cyclic group}, and $a$ is called its \emph{generator}. \\ The group $G$ generated by $a$ is defined by $G=\langle a \rangle$. \end{definition}
\begin{theorem} The \emph{order of a cyclic group} is the same as the \emph{order of its generator}. In other words, for a cyclic group, $|\langle a \rangle | = ord(a)$. \begin{itemize} \item[] $\langle a \rangle$ denotes the cyclic group generated by $a$: $\langle a \rangle = \{e, a, a^2, \ldots, a^{n-1}\}$. \item[] $| \langle a \rangle |$ denotes the order of the cyclic group generated by $a$. \end{itemize} \end{theorem}
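Continuing the $\mathbb{Z}_6$ example (additive notation): $\langle 2 \rangle = \{0, 2, 4\}$, so $|\langle 2 \rangle| = ord(2) = 3$; and $\langle 1 \rangle = \mathbb{Z}_6$, so $1$ is a generator of $\mathbb{Z}_6$.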
\section{Subgroups} \begin{definition}[Subgroup] Let $G$ be a group, and $H$ a non-empty subset of $G$. If \begin{enumerate}[i.] \item the identity $e$ of $G$ is in $H$, \item $H$ is closed with respect to the operation, that is, for $a, b \in H$, $ab \in H$, \item $H$ is closed with respect to inverses, that is, for $a \in H$, $a^{-1} \in H$, \end{enumerate} we call $H$ a \emph{subgroup} of $G$. The operation of $H$ is the same as the operation of $G$. \end{definition}
\begin{theorem} Every subgroup of a cyclic group is cyclic. \end{theorem}
\section{Functions} \begin{definition}[Function] If $A$ and $B$ are sets, then a function from $A$ to $B$ is a rule which to every element $x$ in $A$ assigns a unique element $y$ in $B$. \\ Functions are represented by $f: A \rightarrow B$, where $\forall a \in A \Rightarrow f(a) \in B$. \end{definition}
\begin{definition}[Injective (monomorphism)] A function $f:A \rightarrow B$ is called \emph{injective} if each element of $B$ is the image of no more than one element of $A$. \end{definition}
\begin{definition}[Surjective (epimorphism)] A function $f:A \rightarrow B$ is called \emph{surjective} if each element of $B$ is the image of at least one element of $A$. \end{definition}
\begin{definition}[Bijective (isomorphism)] A function $f:A \rightarrow B$ is called \emph{bijective} if it is both \emph{injective} and \emph{surjective}. \\ A function $f: A \rightarrow B$ has an inverse iff it is \emph{bijective}. In that case, the inverse $f^{-1}$ is a bijective function from $B$ to $A$. \end{definition}
\begin{definition}[Composite function] Let $f:A \rightarrow B$ and $g: B \rightarrow C$ be functions. The \emph{composite function}, denoted by $g \circ f$, is a function from $A$ to $C$ defined as follows: $$[g \circ f](x) = g(f(x)), \quad \forall x \in A$$ \end{definition}
\begin{definition}[Permutation] By a \emph{permutation} of a set $A$ we mean a \emph{bijective function from $A$ to $A$}, that is, a one-to-one correspondence between $A$ and itself. \\ The set of all the permutations of $A$, with the operation $\circ$ of composition, is a group. \\ For any positive integer $n$, the group of all the permutations of the set $\{1, 2, 3, \ldots, n\}$ is called the \emph{symmetric group on $n$ elements}, and is denoted by $S_n$. \end{definition}
% TODO define 'group of permutations' & 'cycle of permutations'
\section{Isomorphism}
\begin{definition}[Isomorphism] Let $G_1$ and $G_2$ be groups. A bijective function $f: G_1 \rightarrow G_2$ with the property that for any two elements $a, b \in G_1$, $$f(ab) = f(a)f(b)$$ is called an \emph{isomorphism} from $G_1$ to $G_2$. \\ If there exists an isomorphism from $G_1$ to $G_2$, we say that $G_1$ is \emph{isomorphic} to $G_2$, symbolized by $G_1 \cong G_2$. \end{definition}
\begin{theorem}[Cayley's Theorem] Every group is isomorphic to a group of permutations. \end{theorem}
\begin{theorem}[Isomorphism of cyclic groups] \begin{enumerate}[i.] \item For every positive integer $n$, every cyclic group of order $n$ is isomorphic to $\mathbb{Z}_n$. Thus, any two cyclic groups of order $n$ are isomorphic to each other. \item Every cyclic group of order infinity is isomorphic to $\mathbb{Z}$, and therefore any two cyclic groups of order infinity are isomorphic to each other. \end{enumerate} \end{theorem}
\section{Cosets}
\begin{definition}[Coset] Let $G$ be a group, and $H$ a subgroup of $G$. For any element $a$ in $G$, the symbol $aH$ denotes the set of all products $ah$, as $a$ remains fixed and $h$ ranges over $H$. $aH$ is called a \emph{left coset} of $H$ in $G$. \\ In similar fashion, $Ha$ denotes the set of all products $ha$, as $a$ remains fixed and $h$ ranges over $H$. $Ha$ is called a \emph{right coset} of $H$ in $G$. \end{definition}
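A small worked example (additive notation): let $G = \mathbb{Z}_6$ and $H = \{0, 3\}$. The cosets of $H$ in $G$ are $$H + 0 = \{0, 3\}, \quad H + 1 = \{1, 4\}, \quad H + 2 = \{2, 5\}$$ and these three cosets partition $G$.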
\begin{theorem} If $Ha$ is any coset of $H$, there is a one-to-one correspondence from $H$ to $Ha$ (there is a bijection between $H$ and $Ha$).\\ If $a \in G$, then $|H| = |Ha|$. \end{theorem}
\begin{theorem}[Lagrange's theorem] Let $G$ be a finite group, and $H$ any subgroup of $G$. The order of $G$ is a multiple of the order of $H$; that is, $|H|$ divides $|G|$. \end{theorem} Lagrange's theorem can be easily seen from the facts that: \begin{enumerate}[i.] \item the cosets of $H$ partition the group $G$ \item $|Ha| = |H|$ (each coset has the same order as $H$). \end{enumerate}
By consequence, \begin{theorem} If $G$ is a group with a prime number $p$ of elements, then $G$ is a cyclic group. Furthermore, any element $a \neq e$ in $G$ is a generator of $G$. \end{theorem}
Thus, \begin{theorem} The order of any element of a finite group divides the order of the group. \end{theorem}
\begin{definition}[Index of $H$ in $G$] The number of cosets of $H$ in $G$. Represented by $(G:H)$.\\ Combined with \emph{Lagrange's theorem}, we know that $|G| = |H| \cdot (G:H)$, so $$(G:H) = \frac{|G|}{|H|}$$ \end{definition}
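In the running example ($G = \mathbb{Z}_6$, $H = \{0,3\}$): $(G:H) = \frac{|G|}{|H|} = \frac{6}{2} = 3$, matching the three cosets listed above.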
\section{Homomorphisms}
\begin{definition}[Homomorphism] If $G$ and $H$ are groups, a \emph{homomorphism} from $G$ to $H$ is a function $f: G \rightarrow H$ s.t. for any two elements $a, b \in G$, $$f(ab) = f(a)f(b)$$ If there exists a homomorphism from $G$ \emph{onto} $H$, we say that $H$ is a \emph{homomorphic image} of $G$. \end{definition} Note: an \emph{isomorphism} is a \emph{bijective homomorphism}. \\ Example of a \emph{homomorphism}: $f: \mathbb{Z}_6 \rightarrow \mathbb{Z}_3$ defined by $f(x) = x \bmod 3$.
\begin{theorem} Let $G$ and $H$ be groups, and $f: G \rightarrow H$ a homomorphism. Then \begin{enumerate}[i.] \item $f(e) = e$ \item $f(a^{-1}) = [f(a)]^{-1}, \quad \forall a \in G$ \end{enumerate} \end{theorem}
\begin{definition}[Conjugate] Let $G$ be a group and $a \in G$. A \emph{conjugate} of $a$ is any element of the form $xax^{-1}$, where $x \in G$. \end{definition}
\begin{definition}[Normal subgroup] Let $H$ be a subgroup of a group $G$. $H$ is called a \emph{normal} subgroup of $G$ if it is closed with respect to conjugates, that is, if\\ for any $a \in H$ and $x \in G$, $xax^{-1} \in H$. \\ Alternatively, we can see that $H$ is a \emph{normal} subgroup iff $\forall a \in G, aH = Ha$. \\ In an abelian group, every subgroup is normal. \end{definition}
\begin{definition}[Kernel]\label{def:homomorphismkernel} Let $f: G \rightarrow H$ be a homomorphism. The \emph{kernel} of $f$ is the set $K$ of all the elements of $G$ which are carried by $f$ onto the neutral element of $H$. That is, $$K = \{x \in G : f(x) = e\}$$ \emph{Kernel in the context of extension fields: \ref{def:extensionkernel}} \end{definition} For every homomorphism, the identity $e \in G$ maps to $e \in H$, so the \emph{kernel} is never empty: it always contains the identity $e_G$. Moreover, if the kernel contains only the identity, then $f$ is one-to-one (injective).
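In the example above, for $f: \mathbb{Z}_6 \rightarrow \mathbb{Z}_3$ with $f(x) = x \bmod 3$, the kernel is $K = \{0, 3\}$: exactly the elements of $\mathbb{Z}_6$ that $f$ sends to $0$.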
\section{Quotient Groups}
The quotient group construction is useful as a way of actually manufacturing all the homomorphic images of any group $G$. Additionally, we can often choose $H$ so as to ``factor out'' unwanted properties of $G$, and preserve in $G/H$ only ``desirable'' traits.
\begin{definition}[Coset multiplication] The coset of $a$, multiplied by the coset of $b$, is defined to be the coset of $ab$. In symbols, $Ha \cdot Hb = H(ab)$. \end{definition}
\begin{theorem} Let $H$ be a normal subgroup of $G$. If $Ha = Hc$ and $Hb = Hd$, then $H(ab) = H(cd)$. \end{theorem}
\begin{definition} $G/H$ denotes the set which consists of \emph{all the cosets of $H$}. \\ Thus, if $Ha, Hb, Hc, \ldots$ are cosets of $H$, then $G/H = \{ Ha, Hb, Hc, ... \}$. \end{definition}
\begin{theorem}[Quotient group] $G/H$ with coset multiplication is a group. \end{theorem}
\begin{theorem} $G/H$ is a homomorphic image of $G$. \\ Conversely, every homomorphic image of $G$ is isomorphic to a quotient group of $G$. \end{theorem}
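Tying the running example together: with $G = \mathbb{Z}_6$ and $H = \{0, 3\}$, the quotient group $G/H = \{H,~ H+1,~ H+2\}$ has order $3$ and $G/H \cong \mathbb{Z}_3$; it is precisely the homomorphic image of $\mathbb{Z}_6$ under $f(x) = x \bmod 3$.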
\begin{theorem} Let $G$ be a group and $H$ a subgroup of $G$. Then \begin{enumerate}[i.] \item $Ha = Hb$ iff $ab^{-1} \in H$ \item $Ha = H$ iff $a \in H$ \end{enumerate} \end{theorem}
\section{Rings}
\begin{definition}[Ring] A set $A$ with operations called \emph{addition} and \emph{multiplication} which satisfy the following axioms: \begin{enumerate}[i.] \item $A$ with addition alone is an abelian group. \item Multiplication is associative. \item Multiplication is distributive over addition. That is, $\forall a,b,c \in A$, $$a(b+c) = ab + ac$$ $$(b+c)a = ba + ca$$ \end{enumerate} \end{definition}
\begin{definition}[Commutative ring] By definition, addition is commutative in every ring, but multiplication need not be. When multiplication is also commutative in a ring, we call that ring a \emph{commutative} ring. \end{definition}
\begin{definition}[Unity] A ring does not necessarily have a neutral element for multiplication. If there is in $A$ a neutral element for multiplication, it is called the \emph{unity} of $A$, and is denoted by the symbol $1$. \\ If $A$ has a unity, we call $A$ a \emph{ring with unity}. \end{definition}
\begin{definition}[Field] If $A$ is a commutative ring with unity in which every nonzero element is invertible, $A$ is called a \emph{field}. \end{definition}
\begin{definition}[Divisor of zero] In any ring, a nonzero element $a$ is called a \emph{divisor of zero} if there is a nonzero element $b$ in the ring such that the product $ab$ or $ba$ is equal to zero. \end{definition}
\begin{definition}[Cancellation property] A ring is said to have the \emph{cancellation property} if, for any elements $a, b, c$ in the ring with $a \neq 0$, $ab = ac$ or $ba = ca$ implies $b = c$. \end{definition}
\begin{theorem} A ring has the \emph{cancellation property} iff it has no \emph{divisors of zero}. \end{theorem}
\begin{definition}[Ideal] A nonempty subset $B$ of a ring $A$ is called an \emph{ideal} of $A$ if $B$ is closed with respect to addition and negatives, and $B$ absorbs products in $A$. \\ (\emph{Absorbs product}: $\forall b \in B$ and $x \in A$, then $xb, bx \in B$). \end{definition}
\begin{definition}[Principal ideal] A \emph{principal ideal} is an ideal $I$ in a ring $R$ that is generated by a single element $a \in R$ through multiplication by every element of $R$. In other words, $I = aR = \{a r : r \in R \}$. \\ (e.g. every ideal of $\mathbb{Z}$ is principal). \end{definition}
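For instance, the even integers $2\mathbb{Z} = \{2r : r \in \mathbb{Z}\}$ form the principal ideal of $\mathbb{Z}$ generated by $2$: the set is closed under addition and negatives, and any integer multiple of an even number is even.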
\begin{definition}[Integral domain] An \emph{integral domain} is defined to be a commutative ring with unity having the cancellation property. \end{definition}
Every field is an integral domain, but the converse is not true (eg. $\mathbb{Z}$ is an integral domain but not a field).
\begin{definition}[Characteristic $n$] Let $A$ be a ring with unity. The \emph{characteristic} of $A$ is the least positive integer $n$ such that $$\underbrace{1 + 1 + \cdots + 1}_{n\ \mathrm{times}} = 0$$ If there is no such positive integer $n$, $A$ has characteristic $0$. \end{definition}
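For example, $\mathbb{Z}_n$ has characteristic $n$ (the sum of $n$ ones is $n \equiv 0 \pmod n$), while $\mathbb{Z}$, $\mathbb{Q}$ and $\mathbb{R}$ have characteristic $0$.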
\section{Elements of number theory}
\begin{theorem}[Euclid's lemma] Let $m$ and $n$ be integers, and let $p$ be a prime. If $p|(mn)$, then either $p|m$ or $p|n$. \end{theorem}
\begin{theorem}[Factorization into primes] Every integer $n>1$ can be expressed as a product of positive primes. That is, there are one or more primes $p_1, \ldots, p_r$ such that $n=p_1 p_2 \cdots p_r$. \end{theorem}
\begin{theorem}[Unique factorization] Suppose $n$ can be factored into positive primes in two ways, namely, $$n= p_1 \cdots p_r = q_1 \cdots q_t$$ Then $r=t$, and the $p_i$ are the same numbers as the $q_j$ except, possibly, for the order in which they appear. \end{theorem}
From the last two theorems: every integer $m$ can be factored into primes, and the prime factors of $m$ are unique (except for the order).
\begin{theorem}[Little theorem of Fermat] Let $p$ be a prime. Then, $$a^{p-1} \equiv 1 \pmod p, \quad \forall a \not\equiv 0 \pmod p$$ \\ Multiplying both sides by $a$ gives $a^p \equiv a \pmod p$, which in fact holds $\forall a \in \mathbb{Z}$, so $a^p - a$ is a multiple of $p$. Note also that $a^{p-2} \cdot a \equiv 1 \pmod p$, i.e. $a^{p-2} \equiv a^{-1} \pmod p$ (the inverse modulo $p$).
~\\\emph{Relation to Lagrange's theorem:}\\ Let $G = \mathbb{Z}_p^*$ (the multiplicative group of the nonzero residues modulo $p$), and let $H$ be the subgroup of $G$ generated by $a$ (i.e. $H = \{ 1, a, a^2, \ldots \}$). The order of $H$ ($h = |H|$) is also the order of $a$ (i.e. the smallest $n \geq 1$ s.t. $a^n \equiv 1 \pmod p$).
By Lagrange's theorem, $h$ divides $|G| = p - 1$, so $p - 1 = h \cdot m$ for some integer $m$, thus $$a^{p-1} = (a^h)^m \equiv 1^m \equiv 1 \pmod p$$
~\\\emph{Another perspective:}\\ We have $a^p \equiv a \pmod{p}$; dividing both sides by $a$ (valid when $a \not\equiv 0 \pmod p$, so that $a$ is invertible modulo $p$) yields $a^{p-1} \equiv 1 \pmod{p}$.
\end{theorem}
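A quick numerical check: with $p = 7$ and $a = 3$, $$3^{6} = 729 = 104 \cdot 7 + 1 \equiv 1 \pmod 7$$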
\begin{definition}[Euler's $\phi$ function] \emph{Euler's $\phi$ function} gives the number of integers in $\mathbb{Z} / n \mathbb{Z}$ which are relatively prime (coprime) to $n$. \end{definition}
\begin{theorem}[Euler's theorem] If $a$ and $n$ are relatively prime, $$a^{\phi(n)} \equiv 1 \pmod n$$ \end{theorem}
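For example, $\phi(10) = 4$ (the integers $1, 3, 7, 9$ are the ones coprime to $10$), and for $a = 3$ (coprime to $10$): $$3^{\phi(10)} = 3^4 = 81 \equiv 1 \pmod{10}$$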
\section{Polynomials}
\begin{definition} Let $A$ be a commutative ring with unity, and $x$ an arbitrary symbol. Every expression of the form $$a_0 + a_1 x + a_2 x^2 + \cdots + a_n x^n$$ is called a \emph{polynomial in $x$ with coefficients in $A$}, or more simply, a \emph{polynomial in $x$ over $A$}. \end{definition} The expressions $a_k x^k$, for $k \in \{ 0, 1, \ldots, n \}$, are called the \emph{terms} of the polynomial, where $a_n x^n$ is the \emph{leading term} and $a_0$ the \emph{constant term}. The $a_k$ are called the \emph{coefficients} of $x^k$, with $a_n$ the \emph{leading coefficient}. The \emph{degree} of a polynomial $a(x)$ is the greatest $n$ such that the coefficient of $x^n$ is not zero. A polynomial whose leading coefficient is equal to $1$ is called \emph{monic}.
\begin{theorem}[Division algorithm for polynomials] If $a(x)$ and $b(x)$ are polynomials over a field $F$, and $b(x) \neq 0$, there exist polynomials $q(x)$ and $r(x)$ over $F$ such that $a(x) = b(x) q(x) + r(x)$ and [$r(x) = 0$ or $\deg r(x) < \deg b(x)$]. \end{theorem}
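A small worked instance over $\mathbb{Q}$: dividing $a(x) = x^3 + x + 1$ by $b(x) = x^2 + 1$ gives $$x^3 + x + 1 = (x^2 + 1) \cdot x + 1$$ so $q(x) = x$ and $r(x) = 1$, with $\deg r(x) = 0 < 2 = \deg b(x)$.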
\begin{theorem} Any two nonzero polynomials $a(x), b(x) \in F[x]$ have a greatest common divisor $d(x)$. Furthermore, $d(x)$ can be expressed as a \emph{linear combination} $$d(x)= r(x) a(x) + s(x) b(x)$$ where $r(x), s(x) \in F[x]$. \end{theorem}
\begin{theorem}[Factorization into irreducible polynomials] Every polynomial $a(x)$ of positive degree in $F[x]$ can be written as a product $$a(x) = k p_1(x) p_2(x) \cdots p_r(x)$$ where $k$ is a constant in $F$ and $p_1(x), \ldots, p_r(x)$ are monic irreducible polynomials of $F[x]$. \end{theorem}
\begin{theorem}[Unique factorization] If $a(x)$ can be written in two ways as a product of monic irreducibles, say $$a(x) = k p_1(x) \cdots p_r(x) = l q_1(x) \cdots q_s(x)$$ then $k=l$, $r=s$, and the $p_i(x)$ are the same as the $q_j(x)$ except, possibly, for the order in which they appear. \end{theorem}
\begin{theorem} $c$ is a root of $a(x)$ iff $x - c$ is a factor of $a(x)$. \end{theorem} \begin{theorem} If $a(x)$ has distinct roots $c_1, \ldots, c_m$ in $F$, then $(x-c_1)(x-c_2)\cdots(x-c_m)$ is a factor of $a(x)$. \end{theorem}
\begin{theorem} If $a(x)$ has degree $n$, it has at most $n$ roots. \end{theorem}
Over a finite field $F$, a polynomial and a polynomial function are not the same thing: distinct polynomials can induce the same function (e.g. over $\mathbb{Z}_2$, $x^2 + x$ induces the zero function). If $F$ is infinite, distinct polynomials induce distinct polynomial functions.
For every polynomial with rational coefficients, there is a polynomial with integer coefficients having the same roots. See: $$a(x) = \frac{k_0}{l_0} + \frac{k_1}{l_1} x + \cdots + \frac{k_n}{l_n} x^n$$ $$=\frac{1}{l_0 \cdots l_n} \cdot \underbrace{(k_0 l_1 \cdots l_n + k_1 l_0 l_2 \cdots l_n x + \cdots + k_n l_0 \cdots l_{n-1} x^n)}_{b(x)}$$ $a(x)$ has rational coefficients, $b(x)$ has integer coefficients. $b(x)$ differs from $a(x)$ only by a constant factor ($\frac{1}{l_0\cdots l_n}$), so $a(x)$ and $b(x)$ have the same roots.
$\Longrightarrow~~\forall~p(x) \in \mathbb{Q}[x]$, there is a $f(x) \in \mathbb{Z}[x]$ with the same roots (for every polynomial with rational coefficients, there is a polynomial with integer coefficients having the same roots).
\begin{theorem} If $a(x) = a_0 + a_1 x + \cdots + a_n x^n$ has integer coefficients, and $s/t$ (in lowest terms) is a rational root of $a(x)$, then $s|a_0$ and $t|a_n$. \end{theorem}
\begin{theorem} Suppose $a(x)$ has integer coefficients and can be factored as $a(x) = b(x)c(x)$, where $b(x), c(x)$ have rational coefficients. Then there are polynomials $B(x), C(x)$ with integer coefficients, which are constant multiples of $b(x)$ and $c(x)$ respectively, such that $a(x) = B(x)C(x)$. \end{theorem}
\begin{theorem}[Eisenstein's irreducibility criterion] Let $a(x) = a_0 + a_1 x + \cdots + a_n x^n$ be a polynomial with integer coefficients.
If there is a prime $p$ such that $p | a_i ~\forall i\in\{0, \ldots, n-1\}$, and $p \ndiv a_n$ and $p^2 \ndiv a_0$, then $a(x)$ is irreducible over $\mathbb{Q}$. % Suppose there is a prime number $p$ which divides every coefficient of $a(x)$ except the leading coefficient $a_n$; suppose $p$ does not divide $a_n$ and $p^2$ does not divide $a_0$. Then $a(x)$ is irreducible over $\mathbb{Q}$.
\end{theorem}
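For example, $a(x) = x^3 + 2x^2 + 4x + 2$ is irreducible over $\mathbb{Q}$ by Eisenstein's criterion with $p = 2$: indeed $2 \,|\, 2$, $2 \,|\, 4$, $2 \,|\, 2$, while $2 \ndiv 1$ (the leading coefficient) and $2^2 = 4 \ndiv 2$ (the constant term).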
\section{Extensions of fields}
\begin{definition}[Kernel]\label{def:extensionkernel} For a fixed $c \in E$, let $\sigma_c : F[x] \rightarrow E$ denote the substitution function $\sigma_c(a(x)) = a(c)$. The \emph{kernel} of $\sigma_c$ consists of all polynomials $a(x) \in F[x]$ such that $c$ is a root of $a(x)$.
\emph{Kernel in the context of Homomorphisms: \ref{def:homomorphismkernel}} \end{definition}
\begin{definition}[Algebraic] $c \in E$ is called \emph{algebraic over} $F$ if it is the root of some nonzero polynomial $a(x) \in F[x]$.
Otherwise, $c$ is called \emph{transcendental over} $F$. \end{definition}
$E/K$ denotes the (field) extension of $E$ over $K$.
\begin{theorem}[Basic theorem of field extensions] Let $F$ be a field and $a(x) \in F[x]$ a nonconstant polynomial. There exists an extension field $E/F$ and an element $c \in E$ such that $c$ is a root of $a(x)$. \end{theorem}
Let $a(x) \in F[x]$ be a polynomial of degree $n$. There is an extension field $E/F$ which contains all $n$ roots of $a(x)$.
\section{Vector spaces}
\begin{definition}[Vector space] A \emph{vector space} over a field $F$ is a set $V$, with two operations $+, \cdot$, called \emph{vector addition} and \emph{scalar multiplication}, such that
\begin{itemize} \item $V$ with vector addition is an abelian group \item $\forall k \in F$ and $\overrightarrow{a} \in V$, the scalar product $k \overrightarrow{a}$ is an element of $V$, subject to the following conditions: $\forall k, l \in F,~\overrightarrow{a},\overrightarrow{b} \in V$ \begin{enumerate}[i.] \item $k(\overrightarrow{a} + \overrightarrow{b}) = k\overrightarrow{a} + k\overrightarrow{b}$ \item $(k + l)\overrightarrow{a} = k\overrightarrow{a} + l\overrightarrow{a}$ \item $k(l\overrightarrow{a}) = (kl)\overrightarrow{a}$ \item $1 \overrightarrow{a} = \overrightarrow{a}$ \end{enumerate} \end{itemize} \end{definition}
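The standard example: $\mathbb{R}^n$, with componentwise addition and scalar multiplication $k(a_1, \ldots, a_n) = (ka_1, \ldots, ka_n)$, is a vector space over $\mathbb{R}$; more generally, $F^n$ is a vector space over any field $F$.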
\begin{definition}[Linear combination] If $\overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n} \in V$, and $k_1, k_2, \ldots, k_n$ are scalars, then the vector $$k_1 \overrightarrow{a_1} + k_2 \overrightarrow{a_2} + \cdots + k_n \overrightarrow{a_n}$$ is called a \emph{linear combination} of $\overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n}$.
The set of all the linear combinations of $\overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n}$ is a \emph{subspace of} $V$. \end{definition}
\begin{definition}[Linear dependency] Let $S = \{\overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n}\}$ be a set of distinct vectors in a vector space $V$. $S$ is said to be \emph{linearly dependent} if there are scalars $k_1, \ldots, k_n$, not all zero, such that $k_1 \overrightarrow{a_1} + k_2 \overrightarrow{a_2} + \cdots + k_n \overrightarrow{a_n} = 0$. This is equivalent to saying that at least one of the vectors in $S$ is a linear combination of the others.
If $S$ is not linearly dependent, then it is \emph{linearly independent}. $S$ is linearly independent iff $k_1 \overrightarrow{a_1} + k_2 \overrightarrow{a_2} + \cdots + k_n \overrightarrow{a_n}=0$ implies $k_1 = k_2 = \cdots = k_n =0$. This is equivalent to saying that no vector in $S$ is equal to a linear combination of the other vectors in $S$. \end{definition}
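For instance, in $\mathbb{R}^2$ the set $\{(1,0), (0,1), (1,1)\}$ is linearly dependent, since $1\cdot(1,0) + 1\cdot(0,1) - 1\cdot(1,1) = (0,0)$, whereas $\{(1,0), (0,1)\}$ is linearly independent.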
If $\{ \overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n} \}$ is linearly dependent, then some $\overrightarrow{a_i}$ is a linear combination of the preceding ones.
If $\{ \overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n} \}$ spans $V$, and $a_i$ is a linear combination of the preceding vectors, then $\{ \overrightarrow{a_1}, \ldots, \crossover{\overrightarrow{a_i}}, \ldots, \overrightarrow{a_n} \}$ still spans $V$.
\begin{theorem} Any two bases of a vector space $V$ have the same number of elements. (A \emph{basis} of $V$ is a set of vectors that is linearly independent and spans $V$.)
For instance, all bases of $\mathbb{R}^n$ contain exactly $n$ vectors. \end{theorem}
If the set $\{\overrightarrow{a_1}, \overrightarrow{a_2}, \ldots, \overrightarrow{a_n}\}$ spans $V$, it contains a basis of $V$.
\framebox{WIP: covered until chapter 28, work in progress.}
\end{document}