/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.mllib.clustering

import breeze.linalg.{max, sum, DenseMatrix => BDM, DenseVector => BDV}
import breeze.numerics._

/**
* Utility methods for LDA.
*/
private[clustering] object LDAUtils {
  /**
   * Log Sum Exp with overflow protection using the identity:
   * For any a: $\log \sum_{n=1}^N \exp\{x_n\} = a + \log \sum_{n=1}^N \exp\{x_n - a\}$
   */
  private[clustering] def logSumExp(x: BDV[Double]): Double = {
    // Shifting by the maximum element keeps every exponent <= 0, so exp cannot overflow.
    val a = max(x)
    a + log(sum(exp(x :- a)))
  }
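
  // Illustrative example: for x = [1000.0, 1000.0], a direct log(sum(exp(x))) overflows to
  // infinity in Double arithmetic, while logSumExp(x) above returns 1000.0 + log(2) ≈ 1000.69.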

  /**
   * For theta ~ Dir(alpha), computes E[log(theta)] given alpha. Currently the implementation
   * uses [[breeze.numerics.digamma]] which is accurate but expensive.
   */
  private[clustering] def dirichletExpectation(alpha: BDV[Double]): BDV[Double] = {
    digamma(alpha) - digamma(sum(alpha))
  }
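
  // Illustrative example: for the uniform prior alpha = [1.0, 1.0, 1.0], every component of
  // the result above is digamma(1) - digamma(3) = -(1 + 1/2) = -1.5.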

  /**
   * Computes [[dirichletExpectation()]] row-wise, assuming each row of alpha is a vector of
   * Dirichlet parameters.
   */
  private[clustering] def dirichletExpectation(alpha: BDM[Double]): BDM[Double] = {
    // Broadcast over rows: alpha(*, ::) applies sum to each row, so rowSum(i) = sum of row i.
    val rowSum = sum(alpha(breeze.linalg.*, ::))
    val digAlpha = digamma(alpha)
    val digRowSum = digamma(rowSum)
    // Broadcast over columns: subtract digamma of each row's sum from every entry of that row.
    val result = digAlpha(::, breeze.linalg.*) - digRowSum
    result
  }
}
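
// A minimal usage sketch: the object name and toy inputs below are illustrative, not part of
// the Spark API. It relies only on being in the same package, so the private[clustering]
// members above are visible.
object LDAUtilsExample {
  def main(args: Array[String]): Unit = {
    // Stable log-sum-exp on values whose exponentials would overflow a Double directly.
    val x = BDV(1000.0, 1000.0, 999.0)
    println(s"logSumExp = ${LDAUtils.logSumExp(x)}") // ~ 1000.0 + log(2 + exp(-1))

    // E[log(theta)] for theta ~ Dir(0.5, 0.5, 0.5).
    val alpha = BDV(0.5, 0.5, 0.5)
    println(s"dirichletExpectation(vector) = ${LDAUtils.dirichletExpectation(alpha)}")

    // Row-wise version: each row of the matrix is one set of Dirichlet parameters.
    val alphaMat = BDM((0.5, 0.5, 0.5), (1.0, 2.0, 3.0))
    println(s"dirichletExpectation(matrix) =\n${LDAUtils.dirichletExpectation(alphaMat)}")
  }
}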