\title{Kullback-Leibler Divergence between Centered Multivariate \eqn{t} Distributions}
\usage{
kldstudent(nu1, Sigma1, nu2, Sigma2, eps = 1e-06)
}
\arguments{
\item{nu1}{numeric. The degrees of freedom of the first distribution.}
\item{Sigma1}{symmetric, positive-definite matrix. The scatter matrix of the first distribution.}
\item{nu2}{numeric. The degrees of freedom of the second distribution.}
\item{Sigma2}{symmetric, positive-definite matrix. The scatter matrix of the second distribution.}
\item{eps}{numeric. Precision for the computation of the partial derivative of the Lauricella \eqn{D}-hypergeometric function (see Details). Default: 1e-06.}
}
\value{
A numeric value: the Kullback-Leibler divergence between the two distributions,
with two attributes \code{attr(, "epsilon")} (precision of the partial derivative of the Lauricella \eqn{D}-hypergeometric function, see Details)
and \code{attr(, "k")} (number of iterations).
}
\description{
Computes the Kullback-Leibler divergence between two random vectors distributed
according to multivariate \eqn{t} distributions (MTD) with zero location vector.
}
\details{
Given \eqn{X_1}, a random vector of \eqn{R^p} distributed according to the MTD
with parameters \eqn{(0, \Sigma_1, \nu_1)}
and \eqn{X_2}, a random vector of \eqn{R^p} distributed according to the MTD
with parameters \eqn{(0, \Sigma_2, \nu_2)}.
Let \eqn{\lambda_1, \dots, \lambda_p} be the eigenvalues of the square matrix \eqn{\Sigma_1 \Sigma_2^{-1}}