\documentclass[11pt]{tufte-handout}
\usepackage[num=2,date=September\ 13,topic=Asymptotic\ analysis]{algorithms-hw}
\begin{document}
\coversheet
\begin{question}[K\&T 2.2] \label{q:running} Suppose you have
algorithms with the seven running times listed below. (Assume these
are the \emph{exact} number of operations performed as a function of
the input size $n$.) Suppose you have a computer that can perform
$10^{10}$ operations per second, and you need to compute a result in
at most an hour of computation. For each of the algorithms, what is
the largest input size $n$ for which you would be able to get the
result within an hour? For answers smaller than $10^{10}$, give
your answer as an exact integer; for larger answers you may provide
an approximation.
\marginnote[-4em]{\emph{\ref{q:running}}\quad\qrcode{Most\ of\ these\
can\ be\ solved\ analytically,\ but\ for\ at\ least\ one\ you\ may\ need\ to\
resort\ to\ numeric\ methods\ (e.g.\ use\ Wolfram\ Alpha\ or\ write\
a\ bit\ of\ code\ yourself).}}
\begin{enumerate}
\item $n^2$
\item $n^3$
\item $100n^2$
\item $n \log_2 n$
\item $2^n$
\item $2^{2^n}$
\item $n!$
\end{enumerate}
\end{question}
In addition to $O$, $\Theta$, and $\Omega$, we will also sometimes use
the notation $o(g(n))$ (little-o) to mean that a function is $O(g(n))$
but \emph{not} $\Theta(g(n))$. So, if $\lim_{n \to \infty} T(n)/g(n)
= 0$, then $T(n)$ is $o(g(n))$. Intuitively, if big-O is like
``less than or equal to'', little-o is like ``less than''; if $f(n)$
is $o(g(n))$ then $f$ ``grows strictly more slowly than'' $g$.
\begin{question}[Some asymptotic properties] \label{q:props}
\mbox{}
\begin{enumerate}[label=(\alph*)]
\item Show that $n^j$ is $o(n^k)$ whenever $j < k$.
\item Prove that $\log_a n$ is $\Theta(\log_b n)$ for any positive
integer bases $a,b > 1$. \marginnote{\emph{Hint}: recall, or look
up, the change-of-base formula for logarithms.} Conclude that
we are justified in writing simply $\Theta(\log n)$ without caring
about the base of the logarithm.
\item Prove that $n^k$ is $o(b^n)$ for any positive integer $k$ and
any real number $b > 1$. For example, $n^{295}$ is $o(1.0001^n)$.
\emph{In the long run}, any polynomial function grows more slowly
than any exponential function! \marginnote{\emph{Hint}: use
L'H\^opital's rule $k$ times. Recall that the derivative of
$b^n$ with respect to $n$ is $b^n \ln b$.}
\end{enumerate}
\end{question}
\newpage
\begin{question}[K\&T 2.3]
Take the following list of functions and arrange them in ascending
order of asymptotic growth rate. That is, if function $g(n)$ immediately
follows function $f(n)$ in your list, then it should be the case
that $f(n)$ is $O(g(n))$. Please prove/justify your claims.
\begin{enumerate}
\item $f_{1}(n) = n^{2.5}$
\item $f_{2}(n) = \sqrt{2n}$
\item $f_{3}(n) = n + 10$
\item $f_{4}(n) = 10^n$
\item $f_{5}(n) = 100^n$
\item $f_{6}(n) = n^2 \log n$
\end{enumerate}
\end{question}
\begin{question} \label{q:theta} Characterize the asymptotic behavior
of each of the following in terms of $n$, using $\Theta$. \textbf{Give
a brief justification for each answer.} For items asking for
the time needed to perform some operation or solve some problem, you
should describe the \emph{worst case} running time of the \emph{best
possible} algorithm.\marginnote[-0.2in]{\textbf{Please come ask
for help} if you are unsure about any of these!}
\begin{enumerate}[label=(\alph*)]
\item $1 + 2 + 3 + 4 + \dots + n$
\item Average number of array lookups needed to find a given element
in a sorted array of length $n$.
\item Number of two-element subsets of a set of size $n$.
\item $1 + 2 + 4 + 8 + \dots + 2^n$
\item Total number of nodes in a complete binary tree with $n$
leaves. \label{qi:binary}
\item Number of edges in a graph with $n$ nodes, where every node is
connected to every other node. \label{qi:complete}
\item Number of different ways to arrange $n$ people in a line.
\item Average number of steps needed to find a given element in a
linked list of length $n$.
\item Biggest integer that can be represented with $n$ bits.
\item Worst-case number of swaps needed to sort an array of length $n$
if you are only allowed to swap adjacent elements.
\item \label{qi:check} Worst-case number of steps needed to check
whether two lists, each containing $n$ integers (not necessarily
sorted), have any element in common.
\marginnote[-4em]{\emph{\ref{q:theta}\ref{qi:check}}\quad\qrcode{Theta(n^2)\
is\ too\ slow!}}
\newpage
\item Number of array lookups performed by this Python code:
\begin{verbatim}
for i in range(0,n):
for j in range (0,i):
sum += array[i][j]
\end{verbatim}
\item Number of addition operations performed by this Java code:
\begin{verbatim}
for (int i = 0; i < n; i += 3) {
sum = sum + i;
}
\end{verbatim}
\item Total number of calls to \texttt{println} performed by this Java code:
\begin{verbatim}
for (int i = 1; i < n; i *= 2) {
for (int j = 0; j < 20; j++) {
System.out.println(i + j);
}
}
\end{verbatim}
\item Total number of calls to \texttt{print} performed by this
Python code:
\begin{verbatim}
def foo(n):
print(n)
if n > 0:
foo(n-1)
foo(n-1)
\end{verbatim}
\end{enumerate}
\end{question}
\begin{question}[K\&T 2.8]
You're doing some stress-testing on various models of glass jars to
determine the height from which they can be dropped and still not
break. The setup for this experiment, on a particular type of jar,
is as follows. You have a ladder with $n$ rungs, and you want to
find the highest rung from which you can drop a copy of the jar and
not have it break. We call this the \emph{highest safe rung}.
It might be natural to try binary search: drop a jar from the middle
rung, see if it breaks, and then recursively try from rung $n/4$ or
$3n/4$ depending on the outcome. But this has the drawback that you
could break a lot of jars in finding the answer.
If your primary goal were to conserve jars, on the other hand, you
could try the following strategy. Start by dropping a jar from the
first rung, then the second rung, and so forth, climbing one higher
each time until the jar breaks. In this way, you only need a single
jar---at the moment it breaks, you have the correct answer---but you
may have to drop it $n$ times (rather than $\log n$ times as in the
binary search solution).
So here is the trade-off: it seems you can perform fewer drops if
you're willing to break more jars. To understand better how this
trade-off works at a quantitative level, let's consider how to run
this experiment given a fixed ``budget'' of $k \geq 1$ jars. In
other words, you have to determine the correct answer---the highest
safe rung---and can use at most $k$ jars in doing so.
\begin{enumerate}[label=(\alph*)]
\item Suppose you are given a budget of $k = 2$ jars. Describe a
strategy for finding the highest safe rung that requires you to
drop a jar at most $f(n)$ times, for some function $f(n)$ that
grows slower than linearly. (In other words, $f(n)$ should be
$o(n)$, that is, $\lim_{n \to \infty} f(n)/n = 0$.)
\item Now suppose you have a budget of $k > 2$ jars, for some given
$k$. Describe a strategy for finding the highest safe rung using
at most $k$ jars. If $f_k(n)$ denotes the number of times you
need to drop a jar according to your strategy, then the functions
$f_1, f_2, f_3, \dots$ should have the property that each grows
asymptotically slower than the previous one: $\lim_{n \to \infty}
f_k(n)/f_{k-1}(n) = 0$ for each $k$.
\end{enumerate}
\end{question}
\end{document}