\documentclass[12pt]{article}
\input{newcmds}
\begin{document}

\begin{center}
{\large\bf C\&O 666 \\ 
Assignment 4 
}
\end{center}

\begin{flushleft}
{\large  Due on Thurs, Mar. 30,~1995
}
\end{flushleft}

\begin{enumerate}

\item
Consider the following problem:
\begin{eqnarray*}
\mbox{Minimize} & \frac{4}{3}(x_1^2-x_1x_2+x_2^2)^{\frac{3}{4}} - x_3\\
\mbox{subject to} & -x_1,-x_2,-x_3 \leq 0\\
       & x_3 \leq 2
\end{eqnarray*}
Show that the objective function is convex and that the optimal solution
is attained at the unique point $\overline{x}=(0,0,2)^t$.
Let the initial point be $x_1=(0,a,0)^t$, where $a \leq 1/(2\sqrt{2})$.
Find feasible directions using the program:
\begin{eqnarray*}
\mbox{Minimize} & \nabla f(x_k)^td\\
\mbox{subject to} &  \hat{A}_1d \leq 0\\
           &   d^td \leq 1
\end{eqnarray*}
where $\hat{A}_1$ is the matrix whose rows are the gradients of the active
constraints at $x_1$. Show that Zoutendijk's method with exact
line search yields the sequence
\begin{eqnarray*}
x_k = \left\{ \begin{array}{ll}
               \left[0,(\frac{1}{2})^{k-1}a,\frac{1}{2}
                   \sum_{j=0}^{k-2}(\frac{a}{2^j})^{\frac{1}{2}}\right] & 
                                    \mbox{if $k$ is odd,}~k \geq 3 \\
               \left[(\frac{1}{2})^{k-1}a,0,\frac{1}{2}
                   \sum_{j=0}^{k-2}(\frac{a}{2^j})^{\frac{1}{2}}\right] & 
                                    \mbox{if $k$ is even}
               \end{array}
       \right.
\end{eqnarray*}
Does this sequence converge to the optimum? Why? How would you correct
the problem?

\item
Let $P$ be a symmetric $n \times n$ matrix and $Q$ a positive 
semi-definite symmetric $n \times n$ matrix. Assume that $x^tPx > 0$,
for all $x \neq 0$ satisfying $x^tQx = 0$. Show that there exists
a scalar $c$ such that
\begin{eqnarray*}
P + cQ \succ 0~(\mbox{positive definite}).
\end{eqnarray*}

\item
Consider the (differentiable) equality constrained program
\begin{eqnarray*}
\mbox{Minimize} & f(x) \\
\mbox{subject to} &  h(x)=0,
\end{eqnarray*}
where $h: \Re^n \rightarrow \Re^m$. Suppose that the second order
sufficient optimality conditions hold at $x^*$. Show that there
exists a scalar $\overline{c}$ such that
\begin{eqnarray*}
\nabla_{xx}^2 L(x^*,\lambda^*)+\overline{c}\nabla h(x^*) \nabla h(x^*)^t 
   \succ 0~(\mbox{positive definite}).
\end{eqnarray*}
Use the above to show that $x^*$ is a strict local minimum. 
(Hint. Use the augmented Lagrangian.)
\end{enumerate}

\begin{enumerate}

\item
Solve the problem
\[
\begin{array}{cc}
\min&x^2 +xy +y^2 -2y \\
{\rm subject~to}& x+y=2
\end{array}
\]
analytically in three ways:
\begin{enumerate}
\item
with the necessary conditions.
\item
with a quadratic penalty function.
\item
with an exact penalty function.
\end{enumerate}
\item
Consider the problem $\min f(x) ~{\rm s.t.}~h(x)=0,$ where $f,h$ are
appropriate functions on $\Re^n$. Show that the condition
number of the
Hessian of the quadratic
penalty function goes to infinity as the penalty parameter goes to infinity.
Why should this cause problems in the penalty method?
\item
For the problem
$\min f(x) ~ {\rm s.t.}~g(x) \leq 0,$ where $g(x)$ is $r$-dimensional, define
the penalty function
\[
p(x) = f(x) + c \max \{0,g_1(x), g_2(x), \cdots , g_r(x) \}.
\]
Let $d,\mu~(d \neq 0)$ be a solution to the quadratic program
\[
\begin{array}{cc}
\min&\half d^tBd + \nabla f(x)^td  \\
{\rm s.t.}& g(x) + \nabla g(x)^td \leq 0,
\end{array}
\]
where $B$ is positive definite. Show that $d$ is a descent direction for $p$
for sufficiently large $c$.
\end{enumerate}
\end{document}
