/*--------------------------------------------------------------

                    SAS Sample Library

        Name: entgs.sas
 Description: Example program from SAS/ETS User's Guide,
              The ENTROPY Procedure
       Title: Getting Started Example for PROC ENTROPY
     Product: SAS/ETS Software
        Keys: Generalized Maximum Entropy
        PROC: ENTROPY
       Notes:

--------------------------------------------------------------*/

data one;
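   /* Simulate 100 BY groups of 10 observations each from y = x1 + 2*t + error */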
   do by = 1 to 100;
      do t = 1 to 10;
         x1 = 10 * ranuni(512);
         y = x1 + 2*t + 7 * rannor(456);
         output;
      end;
   end;
run;


ods exclude all;
ods select ParameterEstimates;
ods output ParameterEstimates=parm1;

proc entropy data=one gme primal;
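   /* GME estimation for each BY group; the estimates are captured in PARM1 via ODS OUTPUT */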
   model y = x1 t;
   by by;
run;

ods select ParameterEstimates;
ods output ParameterEstimates=parm2;

proc reg data=one;
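   /* OLS estimation of the same model for comparison; estimates captured in PARM2 */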
   model y = x1 t;
   by by;
run;

proc kde data=parm1(where=(Variable='x1'));
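   /* Kernel density of the GME estimates of the x1 coefficient */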
   univar Estimate / gridl=-2 gridu=4 out=density;
run;

data density;
   set density;
   rename value = Estimate;
   type = "GME";
   label type = "Estimation Type";
run;

proc kde data=parm2(where=(Variable='x1'));
   univar Estimate / gridl=-2.0 gridu=4 out=density2;
run;

data density2;
   set density2;
   rename value = Estimate;
   type = "OLS";
   label type = "Estimation Type";
run;

data d;
   set density density2;
run;

proc sgplot data=d;
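   /* Overlay the GME and OLS density estimates of the x1 coefficient */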
   series x=estimate y=density / group=type;
   xaxis label="Parameter Estimate";
run;

title "Test Scores compiled by Coleman et al. (1966)";
data coleman;
   input test_score 6.2 teach_sal 6.2 prcnt_prof 8.2
            socio_stat 9.2 teach_score 8.2 mom_ed 7.2;
   label test_score="Average sixth grade test scores in observed district";
   label teach_sal="Average teacher salaries per student (1000s of dollars)";
   label prcnt_prof="Percent of students' fathers with professional employment";
   label socio_stat="Composite measure of socio-economic status in the district";
   label teach_score="Average verbal score for teachers";
   label mom_ed="Average level of education (years) of the students' mothers";
datalines;
37.01   3.83   28.87     7.20   26.60   6.19
26.51   2.89   20.10   -11.71   24.40   5.17
36.51   2.86   69.05    12.32   25.70   7.04
40.70   2.92   65.40    14.28   25.70   7.10
37.10   3.06   29.59     6.31   25.40   6.15
33.90   2.07   44.82     6.16   21.60   6.41
41.80   2.52   77.37    12.70   24.90   6.86
33.40   2.45   24.67    -0.17   25.01   5.78
41.01   3.13   65.01     9.85   26.60   6.51
37.20   2.44    9.99    -0.05   28.01   5.57
23.30   2.09   12.20   -12.86   23.51   5.62
35.20   2.52   22.55     0.92   23.60   5.34
34.90   2.22   14.30     4.77   24.51   5.80
33.10   2.67   31.79    -0.96   25.80   6.19
22.70   2.71   11.60   -16.04   25.20   5.62
39.70   3.14   68.47    10.62   25.01   6.94
31.80   3.54   42.64     2.66   25.01   6.33
31.70   2.52   16.70   -10.99   24.80   6.01
43.10   2.68   86.27    15.03   25.51   7.51
41.01   2.37   76.73    12.77   24.51   6.96
;


proc entropy data=coleman;
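   /* GME regression of test scores on the five district variables */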
   model test_score = teach_sal prcnt_prof socio_stat teach_score mom_ed;
run;

data coleman;
   set coleman;
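   /* Blank out the variable labels */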
   attrib test_score teach_sal prcnt_prof socio_stat teach_score
          mom_ed label=" ";
run;


symbol v=dot h=1 c=green;

proc reg data=coleman;
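   /* OLS fit; plot studentized residuals by observation with reference lines at +/-1.714 */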
   model test_score = teach_sal prcnt_prof socio_stat teach_score mom_ed;
   plot rstudent.*obs.
        / vref=-1.714 1.714 cvref=blue lvref=1
          href=0 to 30 by 5 chref=red cframe=ligr;
run;

proc robustreg data=coleman;
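   /* Robust regression fit of the same model for comparison */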
   model test_score = teach_sal prcnt_prof
                      socio_stat teach_score mom_ed;
run;

proc entropy data=coleman collin;
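   /* GME fit with collinearity diagnostics requested (COLLIN) */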
   model test_score = teach_sal prcnt_prof socio_stat teach_score mom_ed;
run;

data coleman;
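   /* Remove observations 3 and 18 before refitting */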
   set coleman;
   if _n_ = 3 or _n_ = 18 then delete;
run;


proc reg data=coleman ridge=0.9 outest=t noprint;
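   /* Ridge regression with ridge constant 0.9; the coefficients are written to data set T */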
   model test_score = teach_sal prcnt_prof socio_stat teach_score mom_ed;
run;

proc print data=t;
run;

data prior;
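   /* Simulate 100 BY groups of 10 observations from y = 2*t + error */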
   do by = 1 to 100;
      do t = 1 to 10;
         y = 2*t + 5 * rannor(4);
         output;
      end;
   end;
run;


proc entropy data=prior outest=parm1 noprint;
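   /* GME estimates for each BY group, written to PARM1 and summarized by PROC UNIVARIATE */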
   model y = t ;
   by by;
run;

proc univariate data=parm1;
   var t;
run;

title "Prior Distribution of Parameter T";

data w;
   input T weight;
datalines;
10 1
15 5
20 5
25 1
;


proc univariate data=w;
   freq weight;
   histogram T / cfill=ligr;
run;

proc entropy data=prior outest=parm2 noprint;
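   /* Prior for t centered at the true value 2: support points 0, 2, 4 with weights 1, 3, 1 */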
   priors t 0(1) 2(3) 4(1)
          intercept -100(.5) -10(1.5) 0(2) 10(1.5) 100(0.5);
   model y = t;
   by by;
run;

proc univariate data=parm2;
   var t;
run;

proc entropy data=prior outest=parm3 noprint;
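   /* Prior for t centered at 0, away from the true value of 2 */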
   priors t -2(1) 0(3) 2(1)
          intercept -100(.5)  0(2) 100(0.5);
   model y = t;
   by by;
run;

proc univariate data=parm3;
   var t;
run;

data prior2;
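   /* Larger samples: 50 observations per BY group, so the data dominate the prior */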
   do by = 1 to 100;
      do t = 1 to 50;
         y = 2*t + 5 * rannor(456);
         output;
      end;
   end;
run;

proc entropy data=prior2 outest=parm3 noprint;
   priors t -2(1) 0(3) 2(1)
          intercept -100(.5) 0(2) 100(0.5);
   model y = t;
   by by;
run;

proc univariate data=parm3;
   var t;
run;

data one;
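   /* A single observation: the values 1-6 and an observed mean of y = 4 */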
   array x[6] ( 1 2 3 4 5 6 );
   y=4.0;
run;


proc entropy data=one pure;
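   /* Pure inverse problem: estimate probabilities for the six values that sum to 1 and reproduce the mean of 4 */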
   priors x1 0 1 x2 0 1 x3 0 1 x4 0 1 x5 0 1 x6 0 1;
   model y = x1-x6/ noint;
   restrict x1 + x2 + x3 + x4 + x5 + x6 = 1;
run;

data m;
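   /* Generate three periods of market shares from a known first-order Markov transition matrix */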
            /* Known Transition matrix */
   array p[4,4] (0.7 .4 .0 .1
                 0.1 .5 .4 .0
                 0.0 .1 .6 .0
                 0.2 .0 .0 .9 ) ;
            /* Initial Market shares */
   array y[4] y1-y4 ( .4 .3 .2 .1 );
   array x[4] x1-x4;
   drop p1-p16 i;
   do i = 1 to 3;
       x[1] = y[1]; x[2] = y[2];
       x[3] = y[3]; x[4] = y[4];
       y[1] = p[1,1] * x1 + p[1,2] * x2 + p[1,3] * x3 + p[1,4] * x4;
       y[2] = p[2,1] * x1 + p[2,2] * x2 + p[2,3] * x3 + p[2,4] * x4;
       y[3] = p[3,1] * x1 + p[3,2] * x2 + p[3,3] * x3 + p[3,4] * x4;
       y[4] = p[4,1] * x1 + p[4,2] * x2 + p[4,3] * x3 + p[4,4] * x4;
       output;
   end;
run;


proc entropy markov pure data=m(obs=1);
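   /* Recover the transition matrix with the MARKOV option from a single transition */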
   model y1-y4 = x1-x4;
run;

proc entropy markov pure data=m(obs=2);
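   /* Repeat using the first two transitions */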
   model y1-y4 = x1-x4;
run;

data kpdata;
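   /* Job-category data: job takes the values 0-4, x1 is a constant term, and x2-x4 are regressors */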
   input job x1 x2 x3 x4;
datalines;
   0 1 3 11 1
   0 1 14 12 1
   0 1 44 12 1
   0 1 18 12 1
   0 1 24 14 0
   0 1 38 13 1
   0 1 8 14 0
   0 1 19 14 1
   0 1 8 12 1
   0 1 3 12 1
   0 1 6 12 1
   0 1 40 11 1
   0 1 2 12 1
   0 1 22 12 1
   0 1 4 12 1
   0 1 22 9 1
   0 1 39 6 1
   0 1 3 12 1
   0 1 3 10 1
   0 1 7 9 1
   0 1 27 15 0
   0 1 52 6 0
   0 1 3 12 0
   0 1 35 12 1
   0 1 8 14 1
   0 1 34 11 1
   0 1 14 14 1
   0 1 9 9 1
   0 1 9 14 1
   0 1 5 13 1
   0 1 52 14 1
   1 1 10 12 1
   1 1 25 12 1
   1 1 32 12 1
   1 1 29 12 1
   1 1 4 12 1
   1 1 11 15 1
   1 1 11 16 1
   1 1 19 16 1
   1 1 18 12 1
   1 1 25 10 1
   1 1 8 9 1
   1 1 6 10 1
   1 1 2 12 1
   1 1 23 8 1
   1 1 8 12 1
   1 1 29 3 1
   1 1 30 13 1
   1 1 17 12 1
   1 1 9 12 1
   1 1 11 15 1
   1 1 9 14 1
   1 1 17 10 1
   1 1 4 11 1
   1 1 30 9 1
   1 1 22 16 0
   1 1 29 10 1
   1 1 6 10 1
   1 1 11 16 1
   1 1 5 12 1
   1 1 12 12 1
   1 1 26 8 1
   1 1 35 8 1
   1 1 17 12 1
   1 1 46 6 1
   1 1 6 11 1
   1 1 37 6 1
   1 1 32 11 1
   1 1 43 8 1
   1 1 4 12 1
   1 1 46 6 1
   1 1 51 13 1
   1 1 39 10 1
   1 1 37 12 1
   1 1 10 12 1
   1 1 4 12 1
   1 1 4 12 1
   1 1 49 14 0
   1 1 32 12 1
   1 1 9 12 1
   1 1 9 12 1
   1 1 8 13 1
   1 1 5 12 1
   1 1 34 9 1
   1 1 19 8 1
   1 1 41 7 0
   1 1 37 14 1
   1 1 4 9 1
   1 1 43 11 1
   1 1 14 12 1
   1 1 9 12 1
   1 1 33 8 1
   1 1 15 13 1
   1 1 12 12 1
   1 1 19 13 1
   1 1 23 8 0
   1 1 26 13 1
   1 1 13 13 1
   1 1 22 12 1
   1 1 4 11 1
   2 1 22 12 1
   2 1 10 11 1
   2 1 21 9 1
   2 1 38 6 0
   2 1 11 12 1
   2 1 47 9 1
   2 1 18 13 1
   2 1 8 12 1
   2 1 13 12 1
   2 1 10 12 1
   2 1 41 11 1
   2 1 49 11 1
   2 1 4 13 1
   2 1 9 12 1
   2 1 33 12 1
   2 1 2 12 1
   2 1 11 15 1
   2 1 56 6 1
   2 1 31 13 1
   2 1 13 14 1
   2 1 33 11 1
   2 1 41 12 1
   2 1 6 12 1
   2 1 21 12 1
   2 1 25 13 1
   2 1 13 15 1
   2 1 2 12 1
   2 1 23 12 1
   2 1 32 12 1
   2 1 46 12 1
   2 1 13 12 1
   2 1 29 12 1
   2 1 30 12 1
   2 1 50 10 1
   2 1 32 10 1
   2 1 29 12 1
   2 1 9 16 0
   2 1 49 8 1
   2 1 9 14 0
   2 1 41 14 1
   2 1 9 12 1
   2 1 5 11 1
   2 1 17 12 1
   2 1 9 11 1
   2 1 30 12 1
   2 1 29 7 0
   2 1 9 14 1
   2 1 37 12 1
   2 1 44 7 0
   2 1 22 12 1
   2 1 26 12 1
   2 1 10 12 1
   2 1 33 13 1
   2 1 41 8 1
   2 1 39 12 1
   2 1 29 12 0
   2 1 38 14 1
   2 1 12 12 0
   2 1 9 12 0
   2 1 10 14 1
   2 1 9 16 0
   2 1 20 12 1
   2 1 9 11 1
   2 1 41 14 1
   2 1 6 14 1
   2 1 10 12 1
   2 1 11 14 0
   2 1 21 12 1
   2 1 20 13 1
   2 1 31 14 1
   2 1 4 16 1
   2 1 12 13 1
   2 1 17 14 1
   2 1 40 6 1
   2 1 53 12 1
   2 1 35 14 1
   2 1 12 14 1
   2 1 13 15 1
   2 1 48 8 1
   2 1 23 12 1
   2 1 11 12 1
   2 1 9 12 1
   2 1 9 12 1
   2 1 4 12 1
   3 1 34 16 1
   3 1 12 12 1
   3 1 21 13 0
   3 1 12 15 1
   3 1 17 12 1
   3 1 21 12 1
   3 1 20 12 1
   3 1 35 12 0
   3 1 44 15 1
   3 1 6 16 1
   3 1 5 14 1
   3 1 42 11 1
   3 1 34 12 1
   3 1 37 16 1
   3 1 19 13 1
   3 1 32 12 1
   3 1 25 12 1
   3 1 19 12 1
   3 1 50 12 1
   3 1 6 12 1
   3 1 49 12 1
   3 1 3 11 1
   3 1 49 18 1
   3 1 39 15 1
   3 1 20 15 1
   3 1 10 12 1
   3 1 5 12 1
   3 1 10 13 1
   3 1 30 16 1
   3 1 31 15 1
   3 1 9 12 1
   3 1 8 12 1
   3 1 49 13 1
   3 1 11 16 1
   3 1 2 12 1
   3 1 6 12 1
   3 1 12 10 1
   3 1 5 17 1
   3 1 3 12 1
   3 1 6 16 1
   3 1 38 8 1
   4 1 9 16 1
   4 1 10 16 1
   4 1 37 14 1
   4 1 13 14 1
   4 1 11 13 1
   4 1 21 16 0
   4 1 4 16 1
   4 1 2 16 1
   4 1 6 16 1
   4 1 13 17 1
   4 1 13 16 1
   4 1 18 16 1
   4 1 44 14 1
   4 1 9 16 1
   4 1 38 16 1
   4 1 25 14 1
   4 1 32 13 1
   4 1 18 17 1
   4 1 22 19 1
   4 1 20 12 1
   4 1 11 18 1
   4 1 9 16 1
   4 1 14 14 1
   4 1 2 15 1
   4 1 8 14 1
   4 1 26 12 0
   4 1 10 15 1
   4 1 8 12 1
   4 1 25 16 1
   4 1 8 16 1
   4 1 20 14 1
   4 1 11 12 1
   4 1 23 20 1
   4 1 25 19 1
   4 1 31 13 1
   4 1 2 16 1
   4 1 25 14 1
   4 1 3 16 1
   4 1 16 19 0
   4 1 6 14 1
   4 1 17 19 1
   4 1 23 15 1
   4 1 16 14 1
   4 1 19 13 1
   4 1 9 14 1
   4 1 14 19 1
   4 1 45 14 1
   4 1 17 16 1
   4 1 42 13 1
   4 1 15 12 1
   4 1 17 12 1
   4 1 8 16 1
   4 1 15 17 1
   4 1 37 18 1
   4 1 33 16 1
   4 1 16 11 1
   4 1 13 18 1
   4 1 44 15 1
   4 1 28 16 1
   4 1 41 11 1
   4 1 25 16 1
   4 1 66 12 1
   4 1 31 12 1
   4 1 25 20 1
   4 1 17 17 1
   4 1 23 16 1
   4 1 34 17 1
   4 1 21 16 1
   4 1 4 15 1
   4 1 41 8 1
   4 1 8 12 1
   4 1 2 19 1
   4 1 20 16 1
   4 1 38 15 1
   4 1 23 16 0
   4 1 31 18 1
   4 1 19 20 1
   4 1 4 20 1
   4 1 29 19 0
   4 1 47 12 0
   4 1 27 20 1
   4 1 11 16 1
   4 1 24 13 1
   4 1 12 14 1
   4 1 14 19 1
   4 1 22 16 1
   4 1 11 18 1
   4 1 16 16 1
   4 1 18 19 1
   4 1 17 13 1
   4 1 41 13 1
   4 1 6 16 1
   4 1 3 16 1
   4 1 6 19 1
   4 1 36 18 1
   4 1 3 12 1
   4 1 6 18 1
   4 1 6 13 1
   4 1 9 17 1
   4 1 6 16 1
   4 1 14 19 1
   4 1 7 19 1
   4 1 11 19 0
   4 1 14 14 1
   4 1 13 16 1
   4 1 24 15 1
   4 1 7 18 1
   4 1 19 13 1
   4 1 43 12 1
   4 1 31 12 1
   4 1 39 7 1
   4 1 12 16 1
;


proc entropy data=kpdata gmed tech=nra;
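   /* Multinomial model for job category using the GMED option with Newton-Raphson (TECH=NRA) */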
   model job = x1 x2 x3 x4 / noint
         esupports=( -.1 -0.0666 -0.0333 0 0.0333 0.0666 .1 );
run;

proc entropy data=kpdata gmed tech=nra;
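   /* Rerun with the MARGINALS option to request marginal effects */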
   model job = x1 x2 x3 x4 / noint
         esupports=( -.1 -0.0666 -0.0333 0 0.0333 0.0666 .1 )
         marginals;
run;

proc entropy data=kpdata gmed tech=nra;
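   /* Marginal effects evaluated at the specified values of x2, x3, and x4 */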
   model job = x1 x2 x3 x4 / noint
         esupports=( -.1 -0.0666 -0.0333 0 0.0333 0.0666 .1 )
         marginals=( x2=.4 x3=10 x4=0);
run;

data zero;
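   /* Simulated data in which the true x2 coefficient (-15) lies outside the bounds imposed below */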
  do x1 = 1 to 20;
     x2 = 20 * rannor(1445);
     x3 = ranuni(1231);
     y = 10 + .2 * x1 - 15 * x2 + x3 + 0.01 * rannor(193875);
     output;
  end;
run;


proc entropy data=zero;
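   /* BOUNDS constrains the coefficient estimates; the bound on x2 excludes its true value */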
   bounds .1 <= x1 <= 100,
           0 <= x2 <=  25.6,
           0 <= x3 <=   5;
   model y = x1 x2 x3;
run;