/* SAS code in the NLP procedure chapter (nlp) */
/****************************************************************/
/* S A S S A M P L E L I B R A R Y */
/* */
/* NAME: NLP */
/* TITLE: SAS code in the NLP procedure chapter (nlp) */
/* PRODUCT: OR */
/* SYSTEM: ALL */
/* KEYS: NLP */
/* PROCS: NLP */
/* DATA: */
/* */
/* SUPPORT: UPDATE: */
/* REF: */
/* MISC: */
/* */
/****************************************************************/
title 'PROC NLP';
/****************************************************************/
/* PROC NLP: Getting Started */
/****************************************************************/
title3 'An Unconstrained Problem. Rosenbrocks Function';
/* An Unconstrained Problem */
/* Minimize Rosenbrock's function                                */
/*    f = 0.5*( (10*(x2 - x1**2))**2 + (1 - x1)**2 ).            */
/* MIN names the objective symbol computed by the program        */
/* statements; DECVAR declares the decision variables (no        */
/* initial values are given here).                               */
proc nlp;
min f;
decvar x1 x2;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
f = .5 * (f1 * f1 + f2 * f2);
run;
title3 'Least Squares';
/* Least Squares */
/* The same problem posed in least squares form: LSQ minimizes   */
/* 0.5*(f1**2 + f2**2), so no explicit objective expression is   */
/* needed.                                                       */
proc nlp;
lsq f1 f2;
decvar x1 x2;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
run;
title3 'Boundary Constraints on the Decision Variables';
/* Boundary Constraints on the Decision Variables */
/* Adds simple bounds via BOUNDS: both x1 and x2 must be <= 0.5. */
proc nlp;
lsq f1 f2;
decvar x1 x2;
bounds x1-x2 <= .5;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
run;
title3 'Linear Constraints on the Decision Variables';
/* Linear Constraints on the Decision Variables */
/* Adds the linear constraint x1 + x2 <= 0.6 via LINCON on top   */
/* of the bounds from the previous step.                         */
proc nlp;
lsq f1 f2;
decvar x1 x2;
bounds x1-x2 <= .5;
lincon x1 + x2 <= .6;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
run;
title3 'Nonlinear Constraints on the Decision Variables';
/* Nonlinear Constraints on the Decision Variables */
/* NLINCON imposes c1 >= 0, where c1 = x1**2 - 2*x2 is computed  */
/* by the program statements. TECH=QUANEW selects the            */
/* quasi-Newton optimization technique.                          */
proc nlp tech=QUANEW;
min f;
decvar x1 x2;
bounds x1-x2 <= .5;
lincon x1 + x2 <= .6;
nlincon c1 >= 0;
c1 = x1 * x1 - 2 * x2;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
f = .5 * (f1 * f1 + f2 * f2);
run;
/* A Simple Maximum Likelihood Example */
title3 'A Simple Maximum Likelihood Example';
/* Five observations of x; @@ holds the input line so several    */
/* values are read per record.                                   */
data x;
input x @@;
datalines;
1 3 4 5 7
;
/* Normal-model MLE for mean and sigma: loglik is the            */
/* per-observation log likelihood (additive constants dropped),  */
/* and PROC NLP sums it over the DATA= observations.             */
/* COVARIANCE=H requests the Hessian-based covariance matrix     */
/* (printed by PCOV; PHES prints the Hessian); VARDEF=N uses n   */
/* as the variance divisor. PROFILE requests profile-likelihood  */
/* confidence intervals at the listed alpha levels, and BOUNDS   */
/* keeps sigma strictly positive.                                */
proc nlp data=x vardef=n covariance=h pcov phes;
profile mean sigma / alpha=.5 .1 .05 .01;
max loglik;
parms mean=0, sigma=1;
bounds sigma > 1e-12;
loglik=-0.5*((x-mean)/sigma)**2-log(sigma);
run;
/* CRPJAC Statement */
title3 'CRPJAC Statement';
/* Supplies the analytic crossproduct Jacobian for the least     */
/* squares problem: cpj1-cpj3 hold the three distinct elements   */
/* of the symmetric 2x2 matrix. GRADIENT supplies the analytic   */
/* gradient g1, g2; TECH=LEVMAR is Levenberg-Marquardt.          */
proc nlp tech=levmar;
lsq f1 f2;
decvar x1 x2;
gradient g1 g2;
crpjac cpj1-cpj3;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
g1 = -200 * x1 * (x2 - x1 * x1) - (1 - x1);
g2 = 100 * (x2 - x1 * x1);
cpj1 = 400 * x1 * x1 + 1 ;
cpj2 = -200 * x1;
cpj3 = 100;
run;
/* GRADIENT Statement */
title3 'GRADIENT Statement';
/* Analytic gradient (g1,g2) of the objective y, declared with   */
/* GRADIENT and used with the conjugate-gradient technique       */
/* (TECH=CONGRA).                                                */
proc nlp tech=congra;
min y;
decvar x1 x2;
gradient g1 g2;
y1 = 10 * (x2 - x1 * x1);
y2 = 1 - x1;
y = .5 * (y1 * y1 + y2 * y2);
g1 = -200 * x1 * (x2 - x1 * x1) - (1 - x1);
g2 = 100 * (x2 - x1 * x1);
run;
/* HESSIAN Statement */
title3 'HESSIAN Statement';
/* Analytic gradient plus analytic Hessian: h1-h3 hold the three */
/* distinct elements of the symmetric 2x2 Hessian, used with the */
/* Newton-Raphson ridge technique (TECH=NRRIDG).                 */
proc nlp tech=nrridg;
min f;
decvar x1 x2;
gradient g1 g2;
hessian h1-h3;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
f = .5 * (f1 * f1 + f2 * f2);
g1 = -200 * x1 * (x2 - x1 * x1) - (1 - x1);
g2 = 100 * (x2 - x1 * x1);
h1 = -200 * (x2 - 3 * x1 * x1) + 1;
h2 = -200 * x1;
h3 = 100;
run;
/* JACOBIAN Statement (Rosenbrock Function) */
title3 'JACOBIAN Statement (Rosenbrock Function)';
/* Analytic 2x2 Jacobian of (f1,f2) with respect to (x1,x2),     */
/* stored row-wise in j1-j4 through the ARRAY statement and      */
/* declared with JACOBIAN.                                       */
proc nlp tech=levmar;
array j[2,2] j1-j4;
lsq f1 f2;
decvar x1 x2;
jacobian j1-j4;
f1 = 10 * (x2 - x1 * x1);
f2 = 1 - x1;
j[1,1] = -20 * x1;
j[1,2] = 10;
j[2,1] = -1;
j[2,2] = 0; /* is not needed */
run;
/* MINQUAD and MAXQUAD Statement */
title3 'MINQUAD and MAXQUAD Statement';
/* Minimize a quadratic objective defined by the 2x2 matrix h    */
/* and the constant -100, with bounds and a linear constraint.   */
/* The same h is specified three ways below.                     */
/* 1) h given as an initialized ARRAY:                           */
proc nlp pall;
array h[2,2] .4 0
0 4;
minquad h, -100;
decvar x1 x2 = -1;
bounds 2 <= x1 <= 50,
-50 <= x2 <= 50;
lincon 10 <= 10 * x1 - x2;
run;
/* 2) h given by assigning its nonzero elements in program       */
/* statements: h1 is h[1,1] and h4 is h[2,2] (row-wise           */
/* numbering of the 2x2 array elements h1-h4).                   */
proc nlp pall;
minquad h, -100;
decvar x1 x2;
bounds 2 <= x1 <= 50,
-50 <= x2 <= 50;
lincon 10 <= 10 * x1 - x2;
h1 = .4; h4 = 4;
run;
/* 3) h given with a MATRIX statement that fills elements        */
/* starting at h[1,1].                                           */
proc nlp all;
matrix h[1,1] = .4 4;
minquad h, -100;
decvar x1 x2 = -1;
bounds 2 <= x1 <= 50,
-50 <= x2 <= 50;
lincon 10 <= 10 * x1 - x2;
run;
title3;
/****************************************************************/
/* PROC NLP Example 1: Using the DATA= Option */
/****************************************************************/
title 'PROC NLP Example 1. Using the DATA= Option';
/* The same least squares problem is stated three ways.          */
/* 1) Longhand, without a DATA= set: all 15 residuals y1-y15     */
/* are written out. Residual i is y_i = r_i - (x1 + i/tmp1)      */
/* with tmp1 = (16-i)*x2 + min(i,16-i)*x3.                       */
proc nlp tech=levmar;
lsq y1-y15;
parms x1-x3 = 1;
tmp1 = 15 * x2 + min(1,15) * x3;
y1 = 0.14 - (x1 + 1 / tmp1);
tmp1 = 14 * x2 + min(2,14) * x3;
y2 = 0.18 - (x1 + 2 / tmp1);
tmp1 = 13 * x2 + min(3,13) * x3;
y3 = 0.22 - (x1 + 3 / tmp1);
tmp1 = 12 * x2 + min(4,12) * x3;
y4 = 0.25 - (x1 + 4 / tmp1);
tmp1 = 11 * x2 + min(5,11) * x3;
y5 = 0.29 - (x1 + 5 / tmp1);
tmp1 = 10 * x2 + min(6,10) * x3;
y6 = 0.32 - (x1 + 6 / tmp1);
tmp1 = 9 * x2 + min(7,9) * x3;
y7 = 0.35 - (x1 + 7 / tmp1);
tmp1 = 8 * x2 + min(8,8) * x3;
y8 = 0.39 - (x1 + 8 / tmp1);
tmp1 = 7 * x2 + min(9,7) * x3;
y9 = 0.37 - (x1 + 9 / tmp1);
tmp1 = 6 * x2 + min(10,6) * x3;
y10 = 0.58 - (x1 + 10 / tmp1);
tmp1 = 5 * x2 + min(11,5) * x3;
y11 = 0.73 - (x1 + 11 / tmp1);
tmp1 = 4 * x2 + min(12,4) * x3;
y12 = 0.96 - (x1 + 12 / tmp1);
tmp1 = 3 * x2 + min(13,3) * x3;
y13 = 1.34 - (x1 + 13 / tmp1);
tmp1 = 2 * x2 + min(14,2) * x3;
y14 = 2.10 - (x1 + 14 / tmp1);
tmp1 = 1 * x2 + min(15,1) * x3;
y15 = 4.39 - (x1 + 15 / tmp1);
run;
/* 2) With DATA=: each observation carries the target r and the  */
/* precomputed weights w1 = 16 - _n_ and w2 = min(_n_, 16-_n_).  */
data bard;
input r @@;
w1 = 16. - _n_;
w2 = min(_n_ , 16. - _n_);
datalines;
.14 .18 .22 .25 .29 .32 .35 .39
.37 .58 .73 .96 1.34 2.10 4.39
;
/* _OBS_ is PROC NLP's automatic observation counter, playing    */
/* the role of i in the longhand version.                        */
proc nlp data=bard tech=levmar;
lsq y;
parms x1-x3 = 1.;
y = r - (x1 + _obs_ / (w1 * x2 + w2 * x3));
run;
/* 3) With ARRAYs and a DO loop instead of an input data set.    */
proc nlp tech=levmar;
array r[15] .14 .18 .22 .25 .29 .32 .35 .39 .37 .58
.73 .96 1.34 2.10 4.39 ;
array y[15] y1-y15;
lsq y1-y15;
parms x1-x3 = 1.;
do i = 1 to 15;
w1 = 16. - i;
w2 = min(i , w1);
w3 = w1 * x2 + w2 * x3;
y[i] = r[i] - (x1 + i / w3);
end;
run;
/****************************************************************/
/* PROC NLP Example 2: Using the INQUAD Option */
/****************************************************************/
title 'PROC NLP Example 2. Using the INQUAD= Option';
/* Dense INQUAD= format: one 'quad' row per parameter column     */
/* plus a 'const' row for the constant term of the quadratic.    */
data quad1;
input _type_ $ _name_ $ x1 x2;
datalines;
const . -100 -100
quad x1 0.4 0
quad x2 0 4
;
/* MIN with no operand: the objective is defined entirely by the */
/* INQUAD= data set; PARMS supplies starting values, and bounds  */
/* and the linear constraint are given in the PROC step.         */
proc nlp inquad=quad1 all;
min ;
parms x1 x2 = -1;
bounds 2 <= x1 <= 50,
-50 <= x2 <= 50;
lincon 10 <= 10 * x1 - x2;
run;
/* Sparse INQUAD= format: one row per nonzero element,           */
/* identified by _row_/_col_/_value_. (Defined for illustration; */
/* no PROC step in this sample reads quad2.)                     */
data quad2;
input _type_ $ _row_ $ _col_ $ _value_;
datalines;
const . . -100
quad x1 x1 0.4
quad x2 x2 4
;
/* Dense format carrying the complete problem: starting values   */
/* ('parms'), bounds ('lowerbd'/'upperbd'), and a linear 'ge'    */
/* constraint with its _rhs_ all come from the data set, so the  */
/* PROC step needs only MIN and PARMS.                           */
data quad3;
input _type_ $ _name_ $ x1 x2 _rhs_;
datalines;
const . -100 -100 .
quad x1 0.02 0 .
quad x2 0.00 2 .
parms . -1 -1 .
lowerbd . 2 -50 .
upperbd . 50 50 .
ge . 10 -1 10
;
proc nlp inquad=quad3;
min ;
parms x1 x2;
run;
/****************************************************************/
/* PROC NLP Example 3: Using the INEST= Option */
/****************************************************************/
title 'PROC NLP Example 3. Using the INEST=Option';
/* Constrained test problem stated inline: starting values,     */
/* bounds, and three linear constraints appear in the PROC step */
/* itself; OUTEST=RES saves the solution.                       */
proc nlp tech=trureg outest=res;
min y;
parms x1 = 1,
x2 = .5;
bounds 0 <= x1-x2;
lincon .57735 * x1 - x2 >= 0,
x1 + 1.732 * x2 >= 0,
-x1 - 1.732 * x2 >= -6;
y = (((x1 - 3)**2 - 9.) * x2**3) / (27 * sqrt(3));
run;
/* The same starting values, bounds, and linear constraints      */
/* moved into a TYPE=EST data set ('parms', 'lowerbd', 'ge',     */
/* 'le' rows) read with INEST=.                                  */
data betts1(type=est);
input _type_ $ x1 x2 _rhs_;
datalines;
parms 1 .5 .
lowerbd 0 0 .
ge .57735 -1 .
ge 1 1.732 .
le 1 1.732 6
;
proc nlp inest=betts1 tech=trureg;
min y;
parms x1 x2;
y = (((x1 - 3)**2 - 9) * x2**3) / (27 * sqrt(3));
run;
/* INEST= can also carry named constants (a, b, c, d here) that  */
/* the program statements reference in place of literals.        */
data betts2(type=est);
input _type_ $ x1 x2 _rhs_ a b c d;
datalines;
parms 1 .5 . 3 9 27 3
lowerbd 0 0 . . . . .
ge .57735 -1 0 . . . .
ge 1 1.732 0 . . . .
le 1 1.732 6 . . . .
;
proc nlp inest=betts2 tech=trureg;
min y;
parms x1 x2;
y = (((x1 - a)**2 - b) * x2**3) / (c * sqrt(d));
run;
/****************************************************************/
/* PROC NLP Example 4: Restarting an Optimization */
/****************************************************************/
title 'PROC NLP Example 4. Restarting an Optimization';
/* First run saves the compiled program (OUTMODEL=), the final   */
/* estimates (OUTEST=), and an output data set (OUT=).           */
proc nlp tech=trureg outmodel=model outest=est out=out1;
lsq y1 y2;
parms x1 = -1.2 ,
x2 = 1.;
y1 = 10. * (x2 - x1 * x1);
y2 = 1. - x1;
run;
proc print data=out1;
run;
/* Restart: TECH=NONE performs no optimization. MODEL= reuses    */
/* the saved program and INEST= the saved estimates, so this     */
/* step only evaluates the model at the saved point (OUTDER=1    */
/* writes first derivatives to OUT=; PJAC prints the Jacobian).  */
proc nlp tech=none model=model inest=est out=out2 outder=1 pjac PHISTORY;
lsq y1 y2;
parms x1 x2;
run;
proc print data=out2;
run;
/****************************************************************/
/* PROC NLP Example 5: Approximate Standard Errors */
/****************************************************************/
title 'PROC NLP Example 5. Approximate Standard Errors';
data x;
input x @@;
datalines;
1 3 4 5 7
;
/* Estimate the mean by least squares on the residual x - mean;  */
/* COV=J with PSTDERR requests approximate standard errors based */
/* on the crossproduct Jacobian.                                 */
proc nlp data=x cov=j pstderr pshort PHISTORY;
lsq resid;
parms mean=0;
resid=x-mean;
run;
/* Estimate mean and sigma by minimizing the per-observation     */
/* negative log likelihood; COV=1 with SIGSQ=1 fixes the scale   */
/* used in the covariance computation.                           */
proc nlp data=x cov=1 sigsq=1 pstderr phes pcov pshort;
min nloglik;
parms mean=0, sigma=1;
bounds 1e-12 < sigma;
nloglik=.5*((x-mean)/sigma)**2 + log(sigma);
run;
/* Minimize the summed squared residuals directly, again with    */
/* COV=1 and SIGSQ=1.                                            */
proc nlp data=x cov=1 sigsq=1 pstderr pcov pshort;
min sqresid;
parms mean=0;
sqresid=.5*(x-mean)**2;
run;
/* Same minimization with COV=2, an alternative covariance-      */
/* matrix formula.                                               */
proc nlp data=x cov=2 pstderr pcov pshort;
min sqresid;
parms mean=0;
sqresid=.5*(x-mean)**2;
run;
/****************************************************************/
/* PROC NLP Example 6: Maximum Likelihood Weibull Estimation */
/****************************************************************/
title 'PROC NLP Example 6. Maximum Likelihood Weibull Estimation';
title3 'Lawless (1982): 2-Parameter Weibull MLE';
/* Survival times in days; cens=1 marks a censored observation.  */
data pike;
input days cens @@;
datalines;
143 0 164 0 188 0 188 0
190 0 192 0 206 0 209 0
213 0 216 0 220 0 227 0
230 0 234 0 246 0 265 0
304 0 216 1 244 1
;
/* TYPE=EST data set with starting values ('parms') and lower    */
/* bounds ('lb') for sig, c, and theta. theta starts at 0 and is */
/* left out of the PARMS statement below, so the first fit is    */
/* the 2-parameter model.                                        */
data par1(type=est);
keep _type_ sig c theta;
_type_='parms'; sig = .5;
c = .5; theta = 0; output;
_type_='lb'; sig = 1.0e-6;
c = 1.0e-6; theta = .; output;
run;
/* 2-parameter Weibull MLE. s starts as the (always present)     */
/* survival term -(x_th/sig)**c; the line `s + ...` is a SAS sum */
/* statement (s = s + expr) that adds the density terms only for */
/* uncensored observations (cens=0). OUTMODEL= saves the         */
/* compiled program for reuse in the 3-parameter fit.            */
proc nlp data=pike tech=tr inest=par1 outest=opar1
outmodel=model cov=2 vardef=n pcov phes;
max logf;
parms sig c;
profile sig c / alpha = .9 to .1 by -.1 .09 to .01 by -.01;
x_th = days - theta;
s = - (x_th / sig)**c;
if cens=0 then s + log(c) - c*log(sig) + (c-1)*log(x_th);
logf = s;
run;
/* Calculate upper bound for theta parameter */
proc univariate data=pike noprint;
var days;
output out=stats n=nobs min=minx range=range;
run;
/* Build theta's starting value (below the sample minimum,       */
/* floored at 0) and upper bound (just below the minimum).       */
data stats;
set stats;
keep _type_ theta;
/* 1. write parms observation */
theta = minx - .1 * range;
if theta < 0 then theta = 0;
_type_ = 'parms';
output;
/* 2. write ub observation */
theta = minx * (1 - 1e-4);
_type_ = 'ub';
output;
run;
proc sort data=opar1;
by _type_;
run;
/* Merge the 2-parameter estimates with theta's start/upper      */
/* bound to form the INEST= set for the 3-parameter fit.         */
data par2(type=est);
merge opar1(drop=theta) stats;
by _type_;
keep _type_ sig c theta;
if _type_ in ('parms' 'lowerbd' 'ub');
run;
/* 3-parameter fit: MODEL= reuses the compiled program from the  */
/* first PROC NLP step, so no program statements are repeated.   */
proc nlp data=pike tech=tr inest=par2 outest=opar2
model=model cov=2 vardef=n pcov phes;
max logf;
parms sig c theta;
profile sig c theta / alpha = .5 .1 .05 .01;
run;
title3;
/****************************************************************/
/* PROC NLP Example 7: Simple Pooling Problem */
/****************************************************************/
title 'PROC NLP Example 7. Simple Pooling Problem';
/* Pooling problem: maximize profit f = revenue from products x  */
/* and y minus cost of feeds a, b, c, subject to flow-balance    */
/* linear constraints and quality constraints nlc1-nlc3 that are */
/* nonlinear because the decision variable pools multiplies      */
/* other decision variables.                                     */
proc nlp all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools = 1;
bounds 0 <= amountx amounty amounta amountb amountc,
amountx <= 100,
amounty <= 200,
0 <= pooltox pooltoy ctox ctoy,
1 <= pools <= 3;
lincon amounta + amountb = pooltox + pooltoy,
pooltox + ctox = amountx,
pooltoy + ctoy = amounty,
ctox + ctoy = amountc;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
costa = 6; costb = 16; costc = 10;
costx = 9; costy = 15;
f = costx * amountx + costy * amounty
- costa * amounta - costb * amountb - costc * amountc;
nlc1 = 2.5 * amountx - pools * pooltox - 2. * ctox;
nlc2 = 1.5 * amounty - pools * pooltoy - 2. * ctoy;
nlc3 = 3 * amounta + amountb - pools * (amounta + amountb);
run;
/* Same model with the cost and quality constants (costa-costy,  */
/* ca-cd) moved into an INEST= data set so the program           */
/* statements reference names instead of literals.               */
data init1(type=est);
input _type_ $ amountx amounty amounta amountb
amountc pooltox pooltoy ctox ctoy pools
_rhs_ costa costb costc costx costy
ca cb cc cd;
datalines;
parms 1 1 1 1 1 1 1 1 1 1
. 6 16 10 9 15 2.5 1.5 2. 3.
;
proc nlp inest=init1 all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools;
bounds 0 <= amountx amounty amounta amountb amountc,
amountx <= 100,
amounty <= 200,
0 <= pooltox pooltoy ctox ctoy,
1 <= pools <= 3;
lincon amounta + amountb = pooltox + pooltoy,
pooltox + ctox = amountx,
pooltoy + ctoy = amounty,
ctox + ctoy = amountc;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
f = costx * amountx + costy * amounty
- costa * amounta - costb * amountb - costc * amountc;
nlc1 = ca * amountx - pools * pooltox - cc * ctox;
nlc2 = cb * amounty - pools * pooltoy - cc * ctoy;
nlc3 = cd * amounta + amountb - pools * (amounta + amountb);
run;
/* Here the bounds ('lowerbd'/'upperbd') and the four linear     */
/* equality constraints ('eq' rows with _rhs_, each record       */
/* spanning two physical lines) also move into the INEST= set.   */
/* NOTE(review): the INPUT list reads 17 fields per record, so   */
/* the last four values of the parms record (2.5 1.5 2 3) are    */
/* not read into any variable -- confirm against the original    */
/* example intent.                                               */
data init2(type=est);
input _type_ $ amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools
_rhs_ costa costb costc costx costy;
datalines;
parms 1 1 1 1 1 1 1 1 1 1
. 6 16 10 9 15 2.5 1.5 2 3
lowerbd 0 0 0 0 0 0 0 0 0 1
. . . . . . . . . .
upperbd 100 200 . . . . . . . 3
. . . . . . . . . .
eq . . 1 1 . -1 -1 . . .
0 . . . . . . . . .
eq 1 . . . . -1 . -1 . .
0 . . . . . . . . .
eq . 1 . . . . -1 . -1 .
0 . . . . . . . . .
eq . . . . 1 . . -1 -1 .
0 . . . . . . . . .
;
/* Only the nonlinear constraints and objective remain in the    */
/* PROC step; OUTMOD= saves the compiled program.                */
proc nlp inest=init2 outmod=model all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
run;
/* Rerun reusing the saved program via MODEL=.                   */
proc nlp inest=init2 model=model all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
run;
/* Rerun from a different starting point: PARMS overrides the    */
/* INEST= starting values (flows 0, pools 2).                    */
proc nlp inest=init2 model=model all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy = 0,
pools = 2;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
run;
/****************************************************************/
/* PROC NLP Example 8: Chemical Equilibrium */
/****************************************************************/
title 'PROC NLP Example 8. Chemical Equilibrium';
/* Minimize y = sum_j x_j*(c_j + log(x_j/s)) over the 10         */
/* compound amounts x1-x10, where s is their sum, subject to     */
/* three element-balance linear constraints. The lower bound     */
/* 1e-6 keeps the log arguments positive.                        */
proc nlp tech=tr pall;
array c[10] -6.089 -17.164 -34.054 -5.914 -24.721
-14.986 -24.100 -10.708 -26.662 -22.179;
array x[10] x1-x10;
min y;
parms x1-x10 = .1;
bounds 1.e-6 <= x1-x10;
lincon 2. = x1 + 2. * x2 + 2. * x3 + x6 + x10,
1. = x4 + 2. * x5 + x6 + x7,
1. = x3 + x7 + x8 + 2. * x9 + x10;
s = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10;
y = 0.;
do j = 1 to 10;
y = y + x[j] * (c[j] + log(x[j] / s));
end;
run;
/* Same model with an analytic gradient g1-g10 computed in the   */
/* same DO loop; FDHESSIAN requests a finite-difference Hessian. */
proc nlp tech=tr fdhessian all;
array c[10] -6.089 -17.164 -34.054 -5.914 -24.721
-14.986 -24.100 -10.708 -26.662 -22.179;
array x[10] x1-x10;
array g[10] g1-g10;
min y;
parms x1-x10 = .1;
bounds 1.e-6 <= x1-x10;
lincon 2. = x1 + 2. * x2 + 2. * x3 + x6 + x10,
1. = x4 + 2. * x5 + x6 + x7,
1. = x3 + x7 + x8 + 2. * x9 + x10;
s = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10;
y = 0.;
do j = 1 to 10;
y = y + x[j] * (c[j] + log(x[j] / s));
g[j] = c[j] + log(x[j] / s);
end;
run;
/****************************************************************/
/* PROC NLP Example 9: Minimize Total Delay in a Network */
/****************************************************************/
title 'PROC NLP Example 9. Minimize Total Delay in a Network';
/* First find the maximum feasible flow into node 4: maximize    */
/* y = x24 + x34 subject to arc capacities (BOUNDS) and flow     */
/* conservation (LINCON). The 0* terms keep all five decision    */
/* variables present in the objective expression.                */
proc nlp all initial=.5;
max y;
parms x12 x13 x32 x24 x34;
bounds x12 <= 10,
x13 <= 30,
x32 <= 10,
x24 <= 30,
x34 <= 10;
/* what flows into an intersection must flow out */
lincon x13 = x32 + x34,
x12 + x32 = x24,
x24 + x34 = x12 + x13;
y = x24 + x34 + 0*x12 + 0*x13 + 0*x32;
run;
/* Then minimize total delay for a fixed demand of 5 units:      */
/* t_ij is the per-unit delay on arc (i,j), so the objective is  */
/* the sum of t_ij * x_ij over all arcs.                         */
proc nlp all initial=.5;
min y;
parms x12 x13 x32 x24 x34;
bounds x12 x13 x32 x24 x34 >= 0;
lincon x13 = x32 + x34, /* flow in = flow out */
x12 + x32 = x24,
x24 + x34 = 5; /* = f = desired flow */
t12 = 5 + .1 * x12 / (1 - x12 / 10);
t13 = x13 / (1 - x13 / 30);
t32 = 1 + x32 / (1 - x32 / 10);
t24 = x24 / (1 - x24 / 30);
t34 = 5 + .1 * x34 / (1 - x34 / 10);
y = t12*x12 + t13*x13 + t32*x32 + t24*x24 + t34*x34;
run;
/****************************************************************/
/* Rewriting NLP Models for PROC OPTMODEL */
/****************************************************************/
/* PROC NLP Example 7 */
/* The original PROC NLP pooling model, repeated for comparison  */
/* with its PROC OPTMODEL translation below.                     */
proc nlp all;
parms amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools = 1;
bounds 0 <= amountx amounty amounta amountb amountc,
amountx <= 100,
amounty <= 200,
0 <= pooltox pooltoy ctox ctoy,
1 <= pools <= 3;
lincon amounta + amountb = pooltox + pooltoy,
pooltox + ctox = amountx,
pooltoy + ctoy = amounty,
ctox + ctoy = amountc;
nlincon nlc1-nlc2 >= 0.,
nlc3 = 0.;
max f;
costa = 6; costb = 16; costc = 10;
costx = 9; costy = 15;
f = costx * amountx + costy * amounty
- costa * amounta - costb * amountb - costc * amountc;
nlc1 = 2.5 * amountx - pools * pooltox - 2. * ctox;
nlc2 = 1.5 * amounty - pools * pooltoy - 2. * ctoy;
nlc3 = 3 * amounta + amountb - pools * (amounta + amountb);
run;
/* PROC OPTMODEL version: PARMS/BOUNDS become VAR declarations   */
/* with INIT and bound suffixes, LINCON becomes CON, the         */
/* objective is declared with MAX, and the nonlinear constraints */
/* become named CON statements with their relations attached.    */
proc optmodel;
var amountx init 1 >= 0 <= 100,
amounty init 1 >= 0 <= 200;
var amounta init 1 >= 0,
amountb init 1 >= 0,
amountc init 1 >= 0;
var pooltox init 1 >= 0,
pooltoy init 1 >= 0;
var ctox init 1 >= 0,
ctoy init 1 >= 0;
var pools init 1 >=1 <= 3;
con amounta + amountb = pooltox + pooltoy,
pooltox + ctox = amountx,
pooltoy + ctoy = amounty,
ctox + ctoy = amountc;
number costa, costb, costc, costx, costy;
costa = 6; costb = 16; costc = 10;
costx = 9; costy = 15;
max f = costx * amountx + costy * amounty
- costa * amounta - costb * amountb - costc * amountc;
con nlc1: 2.5 * amountx - pools * pooltox - 2. * ctox >= 0,
nlc2: 1.5 * amounty - pools * pooltoy - 2. * ctoy >= 0,
nlc3: 3 * amounta + amountb - pools * (amounta + amountb)
= 0;
solve;
print amountx amounty amounta amountb amountc
pooltox pooltoy ctox ctoy pools;
/* PROC NLP Example 8 */
/* The original PROC NLP chemical-equilibrium model, repeated    */
/* for comparison with its PROC OPTMODEL translations below.     */
proc nlp tech=tr pall;
array c[10] -6.089 -17.164 -34.054 -5.914 -24.721
-14.986 -24.100 -10.708 -26.662 -22.179;
array x[10] x1-x10;
min y;
parms x1-x10 = .1;
bounds 1.e-6 <= x1-x10;
lincon 2. = x1 + 2. * x2 + 2. * x3 + x6 + x10,
1. = x4 + 2. * x5 + x6 + x7,
1. = x3 + x7 + x8 + 2. * x9 + x10;
s = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10;
y = 0.;
do j = 1 to 10;
y = y + x[j] * (c[j] + log(x[j] / s));
end;
run;
/* PROC OPTMODEL version: the compounds become an index set,     */
/* the intermediate s becomes an IMPVAR, and the DO loop over    */
/* the objective terms becomes a SUM expression.                 */
proc optmodel;
set CMP = 1..10;
number c{CMP} = [-6.089 -17.164 -34.054 -5.914 -24.721
-14.986 -24.100 -10.708 -26.662 -22.179];
var x{CMP} init 0.1 >= 1.e-6;
con 2. = x[1] + 2. * x[2] + 2. * x[3] + x[6] + x[10],
1. = x[4] + 2. * x[5] + x[6] + x[7],
1. = x[3] + x[7] + x[8] + 2. * x[9] + x[10];
/* replace the variable s in the NLP model */
impvar s = sum{i in CMP} x[i];
min y = sum{j in CMP} x[j] * (c[j] + log(x[j] / s));
solve;
print x y;
/* Generalized Read */
/* Data-driven version: one row per compound with its constant c */
/* and its element counts a_1-a_3 (atoms of elements 1-3).       */
data comp;
input c a_1 a_2 a_3;
datalines;
-6.089 1 0 0
-17.164 2 0 0
-34.054 2 0 1
-5.914 0 1 0
-24.721 0 2 0
-14.986 1 1 0
-24.100 0 1 1
-10.708 0 0 1
-26.662 0 0 2
-22.179 1 0 1
;
/* One row per element giving the right-hand side b of its       */
/* balance constraint.                                           */
data atom;
input b @@;
datalines;
2. 1. 1.
;
/* Sets CMP and ELT are populated by READ DATA from the data     */
/* sets (keyed by _n_); col("a_"||i) reads the a_i columns into  */
/* the matrix a, and the balance constraints are generated with  */
/* the indexed CON statement bal.                                */
proc optmodel;
set CMP;
set ELT;
number c{CMP};
number a{ELT,CMP};
number b{ELT};
read data atom into ELT=[_n_] b;
read data comp into CMP=[_n_]
c {i in ELT} < a[i,_n_]=col("a_"||i) >;
var x{CMP} init 0.1 >= 1.e-6;
con bal{i in ELT}: b[i] = sum{j in CMP} a[i,j]*x[j];
impvar s = sum{i in CMP} x[i];
min y = sum{j in CMP} x[j] * (c[j] + log(x[j] / s));
print a b;
solve;
print x;
quit;
/* Same data-driven model with meaningful names: compounds and   */
/* elements are identified by character keys (H, H2, ..., OH and */
/* H, N, O) instead of observation numbers.                      */
data comp;
input name $ c a_h a_n a_o;
datalines;
H -6.089 1 0 0
H2 -17.164 2 0 0
H2O -34.054 2 0 1
N -5.914 0 1 0
N2 -24.721 0 2 0
NH -14.986 1 1 0
NO -24.100 0 1 1
O -10.708 0 0 1
O2 -26.662 0 0 2
OH -22.179 1 0 1
;
data atom;
input name $ b;
datalines;
H 2.
N 1.
O 1.
;
/* SET<STRING> declares string-valued index sets; READ DATA      */
/* keys them by the name column, and col("a_"||i) now builds     */
/* column names from the element names (a_h, a_n, a_o).          */
proc optmodel;
set<string> CMP;
set<string> ELT;
number c{CMP};
number a{ELT,CMP};
number b{ELT};
read data atom into ELT=[name] b;
read data comp into CMP=[name]
c {i in ELT} < a[i,name]=col("a_"||i) >;
var x{CMP} init 0.1 >= 1.e-6;
con bal{i in ELT}: b[i] = sum{j in CMP} a[i,j]*x[j];
impvar s = sum{i in CMP} x[i];
min y = sum{j in CMP} x[j] * (c[j] + log(x[j] / s));
solve;
print x;
quit;