#ifndef __MLPACK_METHODS_LARS_LARS_HPP
#define __MLPACK_METHODS_LARS_LARS_HPP

namespace mlpack {
namespace regression {

// Excerpt of the LARS class declaration (non-exhaustive).
class LARS
{
 public:
  LARS(const bool useCholesky,
       const arma::mat& gramMatrix,
       const double lambda1 = 0.0,
       const double lambda2 = 0.0,
       const double tolerance = 1e-16);

  void Train(const arma::mat& data,
             const arma::vec& responses,
             arma::vec& beta,
             const bool transposeData = true);

  void Predict(const arma::mat& points,
               arma::vec& predictions,
               const bool rowMajor = false) const;

  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int);

 private:
  void Ignore(const size_t varInd);

  void ComputeYHatDirection(const arma::mat& matX,
                            const arma::vec& betaDirection,
                            arma::vec& yHatDirection);

  void CholeskyInsert(double sqNormNewX, const arma::vec& newGramCol);

  void GivensRotate(const arma::vec::fixed<2>& x,
                    arma::vec::fixed<2>& rotatedX,
                    arma::mat& G);
};

} // namespace regression
} // namespace mlpack

#include "lars_impl.hpp"

#endif
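A minimal usage sketch based on the declarations above. The header path, the sample data, and the use of the mlpack::regression namespace are assumptions about a typical mlpack setup, not guaranteed details of this exact version.

#include <mlpack/core.hpp>
#include <mlpack/methods/lars/lars.hpp>

using namespace mlpack::regression;

int main()
{
  // 10-dimensional data, 100 points; mlpack convention: one point per column.
  arma::mat data(10, 100, arma::fill::randu);
  arma::vec responses(100, arma::fill::randu);

  // useCholesky = true, lambda1 = 0.1, lambda2 = 0.0 => LASSO problem.
  LARS lars(true, 0.1, 0.0);

  arma::vec beta;                     // Receives the final coefficient vector.
  lars.Train(data, responses, beta);  // transposeData defaults to true.

  // Predict responses for new points (rowMajor defaults to false).
  arma::mat newPoints(10, 5, arma::fill::randu);
  arma::vec predictions;
  lars.Predict(newPoints, predictions);

  predictions.print("predictions:");
  return 0;
}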
std::vector< bool > isIgnored
Membership indicator for set of ignored variables.
std::vector< bool > isActive
Active set membership indicator (for each dimension).
double tolerance
Tolerance for main loop.
std::vector< double > lambdaPath
Value of lambda_1 for each solution in solution path.
bool lasso
True if this is the LASSO problem.
std::vector< size_t > activeSet
Active set of dimensions.
const arma::mat & MatUtriCholFactor() const
Access the upper triangular Cholesky factor.
arma::mat matUtriCholFactor
Upper triangular Cholesky factor; initially 0x0 matrix.
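A short sketch of enabling the Cholesky path and inspecting this factor after training; the interpretation that R.t() * R approximates the Gram matrix of the active dimensions is an assumption drawn from the descriptions here, not a documented guarantee. `data` and `responses` are the hypothetical inputs from the earlier example.

// Keep the Cholesky factor up to date by constructing with useCholesky = true.
LARS choleskyLars(true, 0.1);
arma::vec beta;
choleskyLars.Train(data, responses, beta);

// R is upper triangular; presumably R.t() * R ~= Gram matrix restricted to
// the active dimensions (assumption, see lead-in).
const arma::mat& R = choleskyLars.MatUtriCholFactor();
R.print("upper triangular Cholesky factor:");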
double lambda1
Regularization parameter for l1 penalty.
void Ignore(const size_t varInd)
Add dimension varInd to ignores set (never removed).
std::vector< arma::vec > betaPath
Solution path.
std::vector< size_t > ignoreSet
Set of ignored variables (for dimensions in span{active set dimensions}).
bool elasticNet
True if this is the elastic net problem.
const std::vector< size_t > & ActiveSet() const
Access the set of active dimensions.
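After training, the active set records which dimensions entered the model; a small sketch of reading it back, reusing the hypothetical `lars` model trained in the earlier example:

// Dimensions that entered the active set, i.e. features with nonzero
// coefficients somewhere along the solution path.
const std::vector<size_t>& active = lars.ActiveSet();
for (const size_t dim : active)
  std::cout << "dimension " << dim << " is active" << std::endl;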
An implementation of LARS, a stage-wise homotopy-based algorithm for l1-regularized linear regression...
bool useCholesky
Whether or not to use Cholesky decomposition when solving linear system.
void CholeskyInsert(const arma::vec &newX, const arma::mat &X)
const std::vector< arma::vec > & BetaPath() const
Access the set of coefficients after each iteration; the solution is the last element.
void CholeskyDelete(const size_t colToKill)
double lambda2
Regularization parameter for l2 penalty.
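Taken together, lambda1 and lambda2 correspond to the usual elastic net objective; a sketch of the objective these parameters encode (the exact scaling of the penalty terms is an assumption and may differ by a constant factor in the implementation):

  \min_{\beta} \; \frac{1}{2} \lVert y - X\beta \rVert_2^2
      + \lambda_1 \lVert \beta \rVert_1
      + \frac{1}{2} \lambda_2 \lVert \beta \rVert_2^2

With lambda1 > 0 and lambda2 = 0 this is the LASSO; with both positive it is the elastic net; with both zero it reduces to the unregularized LARS/least-squares path.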
void Activate(const size_t varInd)
Add dimension varInd to active set.
const std::vector< double > & LambdaPath() const
Access the set of values for lambda1 after each iteration; the solution is the last element...
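BetaPath() and LambdaPath() appear to be parallel: entry i of the lambda path is the value of lambda1 at which the i'th coefficient vector in the beta path was produced (this pairing is an inference from the two descriptions above). A sketch of walking the regularization path with the hypothetical trained `lars` model:

// Pair each lambda1 value with its coefficient vector; the last entries are
// the final solution. std::min guards against any length mismatch.
const std::vector<arma::vec>& betas = lars.BetaPath();
const std::vector<double>& lambdas = lars.LambdaPath();
const size_t steps = std::min(betas.size(), lambdas.size());
for (size_t i = 0; i < steps; ++i)
{
  std::cout << "lambda1 = " << lambdas[i]
            << ", nonzero coefficients = " << arma::accu(betas[i] != 0)
            << std::endl;
}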
void Deactivate(const size_t activeVarInd)
Remove activeVarInd'th element from active set.
void Predict(const arma::mat &points, arma::vec &predictions, const bool rowMajor=false) const
Predict y_i for each data point in the given data matrix, using the currently-trained LARS model (so ...
arma::mat matGramInternal
Gram matrix.
void GivensRotate(const arma::vec::fixed< 2 > &x, arma::vec::fixed< 2 > &rotatedX, arma::mat &G)
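This internal helper carries no brief, but the name and signature suggest the standard 2x2 Givens rotation: given x = (x_1, x_2), it presumably finds G such that (interpretation assumed from the name, not from documentation)

  G = \begin{pmatrix} c & s \\ -s & c \end{pmatrix}, \quad
  c = \frac{x_1}{r}, \; s = \frac{x_2}{r}, \; r = \sqrt{x_1^2 + x_2^2}, \quad
  G \begin{pmatrix} x_1 \\ x_2 \end{pmatrix} = \begin{pmatrix} r \\ 0 \end{pmatrix}.

Such rotations are the standard tool for restoring the upper-triangular structure of a Cholesky factor after a column is removed (as in CholeskyDelete).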
void Serialize(Archive &ar, const unsigned int)
Serialize the LARS model.
LARS(const bool useCholesky, const double lambda1=0.0, const double lambda2=0.0, const double tolerance=1e-16)
Set the parameters to LARS.
void Train(const arma::mat &data, const arma::vec &responses, arma::vec &beta, const bool transposeData=true)
Run LARS.
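A short sketch of the transposeData flag, under the assumption that the default (true) means the input follows the usual mlpack layout (one observation per column) and is transposed internally, while false indicates the matrix already stores one observation per row:

// Column-major, mlpack-style data (one point per column): keep the default.
lars.Train(data, responses, beta);
// Data already stored with one observation per row: skip the transpose.
arma::mat rowMajorData = data.t();
lars.Train(rowMajorData, responses, beta, false);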
const arma::mat * matGram
Pointer to the Gram matrix we will use.
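When the Gram matrix is already available, the constructor overload shown in the header excerpt accepts it directly so it need not be recomputed for repeated fits. A sketch, under the assumption that the expected Gram matrix is X^T X for the row-major X that LARS works with internally, which for column-major mlpack data equals data * data.t():

// Reuse a precomputed Gram matrix across fits (assumption: see lead-in).
arma::mat gram = data * data.t();
LARS gramLars(true /* useCholesky */, gram, 0.1 /* lambda1 */);
arma::vec gramBeta;
gramLars.Train(data, responses, gramBeta);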