# Demonstration script for constrOptim2(), an improved replacement for
# stats::constrOptim().  The replacement is loaded from a local source file.
source("g:/computing/constrOptim2.txt")

# Section 1: analytic gradient supplied, minimization ------------------------

# Rosenbrock "banana" objective function.
fr <- function(x) {
  a <- x[1]
  b <- x[2]
  100 * (b - a * a)^2 + (1 - a)^2
}

# Analytic gradient of fr().
grr <- function(x) {
  a <- x[1]
  b <- x[2]
  c(
    -400 * a * (b - a * a) - 2 * (1 - a),
    200 * (b - a * a)
  )
}

constrOptim(
  c(0.5, 0), fr, grr,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1)
)

# Note the difference in function and gradient evaluation counts.  The number
# of outer iterations is also smaller because constrOptim2() uses a different
# (and better) termination criterion.
constrOptim2(
  c(0.5, 0), fr, grr,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1)
)

# Section 2: the maximization "bug" (control$fnscale = -1) -------------------

# Negated banana objective: maximizing this is equivalent to minimizing fr().
fr.neg <- function(x) {
  a <- x[1]
  b <- x[2]
  -(100 * (b - a * a)^2 + (1 - a)^2)
}

# Gradient of fr.neg().
grr.neg <- function(x) {
  a <- x[1]
  b <- x[2]
  -c(
    -400 * a * (b - a * a) - 2 * (1 - a),
    200 * (b - a * a)
  )
}

# Buggy behavior: constrOptim() performs only one outer iteration here.
constrOptim(
  c(0.5, 0), fr.neg, grr.neg,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1),
  control = list(fnscale = -1)
)

# Fixed behavior: constrOptim2() correctly performs many outer iterations
# until convergence.
constrOptim2(
  c(0.5, 0), fr.neg, grr.neg,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1),
  control = list(fnscale = -1)
)

# Section 3: numerical gradient ----------------------------------------------

# Rosenbrock "banana" objective function (redefined, as in the original demo).
fr <- function(x) {
  a <- x[1]
  b <- x[2]
  100 * (b - a * a)^2 + (1 - a)^2
}

# Analytic gradient of fr() (redefined; unused by the calls below).
grr <- function(x) {
  a <- x[1]
  b <- x[2]
  c(
    -400 * a * (b - a * a) - 2 * (1 - a),
    200 * (b - a * a)
  )
}

# When no gradient is specified, constrOptim() falls back to Nelder-Mead.
constrOptim(
  c(0.5, 0), fr, gr = NULL,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1)
)

# constrOptim2() uses the BFGS algorithm even when no gradient is specified.
constrOptim2(
  c(0.5, 0), fr,
  ui = rbind(c(-1, 0), c(1, -1)),
  ci = c(-0.9, 0.1)
)