mice and miceafter for regression modelling

Installing the miceafter and mice packages

You can install the development version of miceafter from GitHub with:

# install.packages("devtools")
devtools::install_github("mwheymans/miceafter")

You can install mice from CRAN with:

install.packages("mice")

Examples

mice and miceafter for pooling logistic regression models

lbp_orig is a dataset with missing values that is included in the miceafter package, so we first impute the missing data with the mice function. Then we use the mids2milist function to turn the resulting mids object into a milist object containing the multiply imputed datasets. Then we use the with function to apply the logistic regression analysis repeatedly across the imputed datasets. With the pool_glm function we obtain the results for the pooled model.
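
Before imputing, it can be useful to see how much is missing and where. A minimal check with base R (colSums and is.na; mice's md.pattern gives a fuller overview of the missing-data patterns):

  library(miceafter)
  
  colSums(is.na(lbp_orig))   # number of missing values per variable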


  library(mice)
  library(miceafter)
  
  imp <- mice(lbp_orig, m=5, maxit=5, printFlag = FALSE) 
 
  dat_imp <- mids2milist(imp)
  
  ra <- with(dat_imp, expr = glm(Chronic ~ factor(Carrying) + Gender + Smoking + 
                      Function + JobControl + JobDemands + SocialSupport, 
                      family = binomial))
  
  poolm <- pool_glm(ra, method="D1")
  poolm$pmodel
#>                term     estimate  std.error  statistic        df     p.value
#> 1       (Intercept) -2.599678571 2.79904484 -0.9287735  80.72068 0.355775562
#> 2            Gender -0.160779207 0.44648149 -0.3601027 141.96434 0.719305476
#> 3           Smoking  0.088180473 0.36379678  0.2423894 147.14268 0.808815992
#> 4          Function -0.078376259 0.04900571 -1.5993292 115.89698 0.112470142
#> 5        JobControl  0.004606545 0.02150228  0.2142352  94.85725 0.830823487
#> 6        JobDemands  0.008403115 0.03951551  0.2126536 132.04678 0.831925088
#> 7     SocialSupport  0.063177544 0.06015009  1.0503317 130.21219 0.295511087
#> 8 factor(Carrying)2  1.373648173 0.58076934  2.3652216  41.19666 0.022808070
#> 9 factor(Carrying)3  1.949499572 0.61017175  3.1950013  40.94789 0.002691594
#>           OR    lower.EXP upper.EXP
#> 1 0.07429746 0.0002832468 19.488700
#> 2 0.85148005 0.3522582803  2.058201
#> 3 1.09218521 0.5321866243  2.241448
#> 4 0.92461647 0.8390884552  1.018862
#> 5 1.00461717 0.9626342925  1.048431
#> 6 1.00843852 0.9326155609  1.090426
#> 7 1.06521594 0.9457089771  1.199825
#> 8 3.94973376 1.2225400655 12.760643
#> 9 7.02517111 2.0486582993 24.090415
  poolm$pmultiparm
#>                  p-values D1 F-statistic
#> Gender           0.718780256  0.12967398
#> Smoking          0.808478793  0.05875261
#> Function         0.110170682  2.55785374
#> JobControl       0.830492967  0.04589673
#> JobDemands       0.831618887  0.04522156
#> SocialSupport    0.293713421  1.10319662
#> factor(Carrying) 0.006526093  5.44561744
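
The pmodel element prints as an ordinary data frame, so (assuming it is one) the pooled odds ratios and their confidence limits can be pulled out with standard subsetting. A minimal sketch, reusing the poolm object from above:

  # keep only the odds ratio columns of the pooled model
  poolm$pmodel[, c("term", "OR", "lower.EXP", "upper.EXP")]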

Back to Examples

mice and miceafter for pooling linear regression models

The lbp_orig dataset again contains missing values, so we follow the same steps as before: impute with the mice function, turn the resulting mids object into a milist object with mids2milist, apply the linear regression analysis repeatedly with the with function, and pool the results with pool_glm.


  library(mice)
  library(miceafter)
  
  imp <- mice(lbp_orig, m=5, maxit=5, printFlag = FALSE) 
 
  dat_imp <- mids2milist(imp)
  
  ra <- with(dat_imp, expr = glm(Pain ~ factor(Carrying) + Gender + Smoking + 
                      Function + JobControl + JobDemands + SocialSupport))
  
  poolm <- pool_glm(ra, method="D1")
  poolm$pmodel
#>                term     estimate  std.error   statistic        df     p.value
#> 1       (Intercept)  5.116256747 2.33216453  2.19378036 123.50275 0.030123078
#> 2            Gender -0.543023519 0.41765045 -1.30018660  91.56731 0.196800206
#> 3           Smoking -0.118049319 0.31689773 -0.37251551 140.95274 0.710068592
#> 4          Function -0.045499572 0.04683091 -0.97157131  44.25423 0.336544504
#> 5        JobControl -0.019450157 0.02147524 -0.90570135  27.81561 0.372872805
#> 6        JobDemands  0.036942210 0.03421558  1.07968979 133.37123 0.282228988
#> 7     SocialSupport  0.001670081 0.05193580  0.03215665 140.25373 0.974392841
#> 8 factor(Carrying)2  0.638354466 0.55983313  1.14025847  14.03321 0.273266532
#> 9 factor(Carrying)3  1.755155121 0.57093300  3.07418757  17.36160 0.006747234
#>         2.5 %     97.5 %
#> 1  0.50006673 9.73244676
#> 2 -1.37256559 0.28651855
#> 3 -0.74453624 0.50843760
#> 4 -0.13986577 0.04886662
#> 5 -0.06345334 0.02455303
#> 6 -0.03073315 0.10461757
#> 7 -0.10100816 0.10434833
#> 8 -0.56210162 1.83881055
#> 9  0.55250008 2.95781016
  poolm$pmultiparm
#>                  p-values D1 F-statistic
#> Gender            0.19452222  1.69048520
#> Smoking           0.70952192  0.13876780
#> Function          0.33448888  0.94395082
#> JobControl        0.37072919  0.82029493
#> JobDemands        0.28039628  1.16573005
#> SocialSupport     0.97434832  0.00103405
#> factor(Carrying)  0.01896136  4.87813729
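
For the linear model the pooled estimates come with plain confidence limits instead of odds ratios. Assuming pmodel is again a data frame, they can be extracted the same way; a minimal sketch reusing poolm from above:

  # pooled estimates with their 95% confidence limits
  poolm$pmodel[, c("term", "estimate", "2.5 %", "97.5 %")]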

Back to Examples

mice and miceafter for selecting logistic regression models

We follow the same procedure as in the first example, but now also apply backward model selection (direction = "BW") with selection criterion p.crit = 0.15.


  library(mice)
  library(miceafter)
  
  imp <- mice(lbp_orig, m=5, maxit=5, printFlag = FALSE) 
 
  dat_imp <- mids2milist(imp)
  
  ra <- with(dat_imp, expr = glm(Chronic ~ factor(Carrying) + Gender + Smoking + 
                      Function + JobControl + JobDemands + SocialSupport, 
                      family = binomial))
  
  poolm <- pool_glm(ra, method="D1", p.crit = 0.15, direction = "BW")
#> Removed at Step 1 is - Smoking
#> Removed at Step 2 is - JobDemands
#> Removed at Step 3 is - JobControl
#> Removed at Step 4 is - Gender
#> Removed at Step 5 is - SocialSupport
#> 
#> Selection correctly terminated, 
#> No more variables removed from the model
  poolm$pmodel
#>                term    estimate  std.error  statistic        df     p.value
#> 1       (Intercept) -0.58603210 0.71646223 -0.8179525  95.34782 0.415423692
#> 2          Function -0.07534636 0.04651968 -1.6196663 127.91028 0.107766927
#> 3 factor(Carrying)2  1.26330472 0.48465748  2.6065928 120.67837 0.010297190
#> 4 factor(Carrying)3  1.84734477 0.55588526  3.3232484  65.43706 0.001459931
#>          OR lower.EXP upper.EXP
#> 1 0.5565312 0.1342136  2.307716
#> 2 0.9274222 0.8458662  1.016842
#> 3 3.5370913 1.3549595  9.233497
#> 4 6.3429552 2.0902963 19.247549
  poolm$pmultiparm
#>                  p-values D1 F-statistic
#> Function         0.105579303    2.623319
#> factor(Carrying) 0.002598671    6.103450
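
The p.crit argument is the p-value threshold for backward selection: at each step the predictor with the largest pooled p-value above the threshold is removed. A sketch with a stricter criterion, identical to the call above except for p.crit:

  # with p.crit = 0.05 only predictors significant at the 5% level are retained
  poolm_strict <- pool_glm(ra, method = "D1", p.crit = 0.05, direction = "BW")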

Back to Examples

mice and miceafter for selecting linear regression models

We follow the same procedure as in the second example, but now also apply backward model selection.


  library(mice)
  library(miceafter)
  
  imp <- mice(lbp_orig, m=5, maxit=5, printFlag = FALSE) 
 
  dat_imp <- mids2milist(imp)
  
  ra <- with(dat_imp, expr = glm(Pain ~ factor(Carrying) + Gender + Smoking + 
                      Function + JobControl + JobDemands + SocialSupport))
  
  poolm <- pool_glm(ra, method="D1", p.crit = 0.15, direction = "BW")
#> Removed at Step 1 is - SocialSupport
#> Removed at Step 2 is - Smoking
#> Removed at Step 3 is - JobDemands
#> Removed at Step 4 is - Gender
#> 
#> Selection correctly terminated, 
#> No more variables removed from the model
  poolm$pmodel
#>                term    estimate  std.error statistic        df      p.value
#> 1       (Intercept)  7.14316335 1.09008239  6.552866 118.59755 1.529257e-09
#> 2          Function -0.09574888 0.04172459 -2.294783  83.53042 2.425399e-02
#> 3        JobControl -0.03007168 0.01761107 -1.707544  91.52800 9.111104e-02
#> 4 factor(Carrying)2  0.64444136 0.43231177  1.490687  53.38486 1.419292e-01
#> 5 factor(Carrying)3  1.55475765 0.47589180  3.267040  47.14458 2.030604e-03
#>        2.5 %       97.5 %
#> 1  4.9846161  9.301710600
#> 2 -0.1787296 -0.012768141
#> 3 -0.0650512  0.004907834
#> 4 -0.2225203  1.511403061
#> 5  0.5974643  2.512050970
  poolm$pmultiparm
#>                  p-values D1 F-statistic
#> Function         0.022655956    5.266029
#> JobControl       0.088797963    2.915706
#> factor(Carrying) 0.003907592    5.869005
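
Note that mice draws the imputations at random, so the pooled results shown above will differ slightly between runs. To make an analysis reproducible, fix the random seed via mice's seed argument; a minimal sketch:

  # fixing the seed makes the imputations, and hence the pooled model, reproducible
  imp <- mice(lbp_orig, m = 5, maxit = 5, seed = 2023, printFlag = FALSE)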

Back to Examples