
I'm fitting a growth mixture model (GMM) using OpenMx:

# Load OpenMx
library(OpenMx)

# Growth Mixture Model

data(myGrowthMixtureData)
names(myGrowthMixtureData)

class1 <- mxModel("Class1",
              type="RAM",
              manifestVars=c("x1", "x2", "x3", "x4", "x5"),
              latentVars=c("intercept", "slope"),
              # residual variances
              mxPath(
                from=c("x1", "x2", "x3", "x4", "x5"),
                arrows=2,
                free=TRUE,
                values = c(1, 1, 1, 1, 1),
                labels=c("residual", "residual", "residual", "residual", "residual")
              ),
              # latent variances and covariance
              mxPath(
                from=c("intercept", "slope"),
                arrows=2,
                connect="unique.pairs",
                free=TRUE,
                values=c(1, .4, 1),
                labels=c("vari1", "cov1", "vars1")
              ),
              # intercept loadings
              mxPath(
                from="intercept",
                to=c("x1", "x2", "x3", "x4", "x5"),
                arrows=1,
                free=FALSE,
                values=c(1, 1, 1, 1, 1)
              ),
              # slope loadings
              mxPath(
                from="slope",
                to=c("x1", "x2", "x3", "x4", "x5"),
                arrows=1,
                free=FALSE,
                values=c(0, 1, 2, 3, 4)
              ),
              # manifest means
              mxPath(
                from="one",
                to=c("x1", "x2", "x3", "x4", "x5"),
                arrows=1,
                free=FALSE,
                values=c(0, 0, 0, 0, 0)
              ),
              # latent means
              mxPath(
                from="one",
                to=c("intercept", "slope"),
                arrows=1,
                free=FALSE,
                values=c(0, -1),
                labels=c("meani1", "means1")
              ),
              # enable the likelihood vector
              mxRAMObjective(A = "A",
                             S = "S",
                             F = "F",
                             M = "M",
                             vector = TRUE)
              ) # close model

class2 <- mxModel(class1, 
              # latent variances and covariance
              mxPath(
                from=c("intercept", "slope"),
                arrows=2,
                connect="unique.pairs",
                free=TRUE,
                values=c(1, .5, 1),
                labels=c("vari2", "cov2", "vars2")
              ),
              # latent means
              mxPath(from="one",
                     to=c("intercept", "slope"),
                     arrows=1,
                     free=TRUE,
                     values=c(5, 1),
                     labels=c("meani2", "means2")
              ),
                     name="Class2"
              ) # close model

# Specifying class probabilities
classP <- mxMatrix("Full", 2, 1, free=c(TRUE, FALSE),
                   values=1, lbound=0.001,
                   labels=c("p1", "ps"), name="Props")

classS <- mxAlgebra(Props %x% (1 / sum(Props)), name="classProbs")

# Specifying the mixture model
algObj <- mxAlgebra(-2*sum(
  log(classProbs[1,1] %x% Class1.objective + classProbs[2,1] %x% Class2.objective)),
  name="mixtureObj")

obj <- mxAlgebraObjective("mixtureObj")

gmm <- mxModel("Growth Mixture Model",
               mxData(
                 observed=myGrowthMixtureData,
                 type="raw"
               ),
               class1, class2,
               classP, classS,
               algObj, obj
)

gmmFit <- mxRun(gmm)

summary(gmmFit)

This will give me summary statistics for the 2-class solution I am running. However, I am trying to understand which cases belong to which class. How can I output the class probabilities for each case, so that I can see in which class each case falls?

1 Answer

No credit for this, as I found it on the OpenMx forum (http://openmx.psyc.virginia.edu/thread/717 and http://openmx.psyc.virginia.edu/sites/default/files/gmm_0.R); however, to complete the question I will post the answer here.

Everything you posted is fine; you "only" have to add this:

# view the estimated class probabilities (mixing proportions)
gmmFit$classProbs

indClassProbs <- function(model, classProbs, round=NA){
    # this function takes a mixture model in OpenMx
    # and returns the posterior class probabilities
    # using Bayes rule, individual person-class likelihoods
    # and the model class probability matrix, as described in
    # Ramaswamy, Desarbo, Reibstein, and Robinson, 1993
    if (missing(model) || !(isS4(model) && is(model, "MxModel"))) {
        stop("'model' argument must be an MxModel object")
    }
    if (missing(classProbs) || !(is.character(classProbs) && length(classProbs) == 1)) {
        stop("'classProbs' argument must be a character string")
    }
    # evaluate the class probability algebra/matrix named in 'classProbs'
    cp <- eval(substitute(mxEval(x, model), list(x = as.symbol(classProbs))))
    if (min(dim(cp)) != 1) stop("Class probabilities matrix must be a row or column vector.")
    if (max(dim(cp)) == 1) stop("Class probabilities matrix must contain two or more classes.")
    cp2 <- as.vector(cp)
    # diagonal matrix of class proportions, used to weight the likelihoods
    cps <- diag(length(cp2))
    diag(cps) <- cp2
    subs <- model@submodels
    # per-person likelihood vector from each class submodel (vector=TRUE objective)
    of <- function(num) {
        return(mxEval(objective, subs[[num]]))
    }
    rl <- sapply(1:length(names(subs)), of)
    raw <- (rl %*% cps)                        # prior-weighted likelihoods
    tot <- 1 / apply(raw, 1, sum)              # per-person normalising constant
    div <- matrix(rep(tot, length(cp2)), ncol=length(cp2))
    icp <- raw * div                           # posterior class probabilities
    if (is.numeric(round)){icp <- round(icp, round)}
    return(icp)
}
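
In case it helps to see what the function is doing, here is a minimal sketch of the Bayes-rule step for a single case, using made-up likelihoods and class proportions (none of these numbers come from the model above):

# hypothetical per-person likelihoods under each class, and hypothetical mixing proportions
lik   <- c(0.020, 0.005)
props <- c(0.6, 0.4)
# Bayes rule: prior-weighted likelihoods, normalised to sum to 1 for this person
post  <- (props * lik) / sum(props * lik)
post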

To get the results, run:

icp <- indClassProbs(gmmFit, "classProbs")

print(icp)

This gives the following results:

               [,1]         [,2]
  [1,] 9.990825e-01 9.175362e-04
  [2,] 9.497102e-01 5.028983e-02
  [3,] 2.167783e-01 7.832217e-01
  [4,] 7.517603e-01 2.482397e-01
  [5,] 8.780429e-01 1.219571e-01
   .        .            .
   .        .            .
   .        .            .
[495,] 2.401654e-03 9.975983e-01
[496,] 1.670137e-02 9.832986e-01
[497,] 3.558952e-06 9.999964e-01
[498,] 4.840615e-02 9.515939e-01
[499,] 9.087141e-03 9.909129e-01
[500,] 8.263976e-08 9.999999e-01
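
If you also want a hard class assignment for each case, one simple follow-up (assuming icp as returned above) is to pick the class with the largest posterior probability:

# most likely class per case, and the resulting class sizes
assignedClass <- apply(icp, 1, which.max)
table(assignedClass)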