GH-8487: add HGLM as a separate toolbox.
GH-8487: crafting HGLM parameters.
GH-8487: implement EM algo.
GH-8487: forming the fixed matrices and vectors.
GH-8487: add test to verify correct initialization of fixed and random coefficients, sigma values, and the T matrix.
GH-8487: Finished implementing EM to estimate fixed coefficients, random coefficients, tmat, and tauEVar.
GH-8487: finished implementing prediction but still need to figure out the model metrics calculation.
GH-8487: Adding support for models without random intercept.
GH-8487: adding normalization and denormalization of fixed and random coefficients.
GH-8487: Completed prediction implementation and added tests to verify prediction is correct when standardize=true/false and random_intercept=true/false.
GH-8487: fixing model metric classes.
GH-8487: add Python and R tests.
GH-8487: adding hooks to generate synthetic data.
GH-8487: added scoring history, model summary, and coefficient tables.
GH-8487: added model metrics for the validation frame.
wendycwong committed Oct 7, 2024
1 parent 733c496 commit 925042a
Showing 81 changed files with 5,785 additions and 3,199 deletions.
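
The commit messages above mention implementing an EM algorithm to estimate the fixed coefficients, random coefficients, tmat, and tauEVar. For orientation only, one common formulation of the EM updates for the Gaussian linear mixed model y_j = X_j β + Z_j u_j + e_j (group j, random effects u_j ~ N(0, T), residuals e_j ~ N(0, τ I)) is sketched below; this is a generic textbook sketch, not notation taken from the patch:

$$
\begin{aligned}
\text{E-step:}\quad & C_j = \Big(\tfrac{1}{\tau} Z_j^\top Z_j + T^{-1}\Big)^{-1}, \qquad \mu_j = \tfrac{1}{\tau}\, C_j Z_j^\top (y_j - X_j\beta) \\
\text{M-step:}\quad & \beta \leftarrow \Big(\textstyle\sum_j X_j^\top X_j\Big)^{-1} \textstyle\sum_j X_j^\top (y_j - Z_j \mu_j) \\
& T \leftarrow \tfrac{1}{J} \textstyle\sum_j \big(\mu_j \mu_j^\top + C_j\big), \qquad
\tau \leftarrow \tfrac{1}{n} \textstyle\sum_j \big(\lVert y_j - X_j\beta - Z_j\mu_j \rVert^2 + \operatorname{tr}(Z_j C_j Z_j^\top)\big)
\end{aligned}
$$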
20 changes: 1 addition & 19 deletions h2o-algos/src/main/java/hex/DataInfo.java
@@ -373,7 +373,7 @@ public DataInfo validDinfo(Frame valid) {

public double[] denormalizeBeta(double [] beta) {
int N = fullN()+1;
assert (beta.length % N) == 0:"beta len = " + beta.length + " expected multiple of" + N;
assert (beta.length % N) == 0:"beta len = " + beta.length + " expected multiple of " + N;
int nclasses = beta.length/N;
beta = MemoryManager.arrayCopyOf(beta,beta.length);
if (_predictor_transform == DataInfo.TransformType.STANDARDIZE) {
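
The denormalizeBeta hunk above is cut off before the actual arithmetic. For reference (not taken from the patch), the standard identity for mapping coefficients fit on standardized predictors x̃_j = (x_j − μ_j)/σ_j back to the original scale — presumably what this method and the commit message about normalizing/denormalizing fixed and random coefficients implement — is:

$$
\beta_j = \frac{\tilde{\beta}_j}{\sigma_j}, \qquad
\beta_0 = \tilde{\beta}_0 - \sum_j \frac{\tilde{\beta}_j\,\mu_j}{\sigma_j}.
$$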
@@ -1078,24 +1078,6 @@ public final double innerProduct(DataInfo.Row row) {
return res;
}

/***
* For HGLM, multiplies w with the data part of the row only, not the random columns.
* @param w
* @param rowContent
* @param catOffsets
* @return
*/
public double[] scalarProduct(double w, double[] rowContent, int catOffsets) { // multiply a row with scalar w
rowContent[0] = w; // intercept term
for (int i = 0; i < nBins; ++i) {
rowContent[binIds[i]+1] = w; // value is absolute
}

for (int i = 0; i < numVals.length; ++i)
rowContent[i+catOffsets+1] += numVals[i]*w;

return rowContent;
}
public final double twoNormSq() {
assert !_intercept;
assert numIds == null;
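For context, the scalarProduct helper removed above fills a dense row representation: the intercept slot and the active one-hot categorical slots receive the scalar weight w, and each numeric slot accumulates its value times w. A standalone sketch of the same operation follows; the class, method, and array-layout names here are illustrative assumptions, not H2O API:

// Illustrative only: dense row layout = [intercept | one-hot categoricals | numerics],
// scaled by a scalar weight w, mirroring the logic of the removed DataInfo.scalarProduct.
public final class ScalarProductSketch {
  static double[] scalarProduct(double w, double[] out, int[] activeCatSlots,
                                double[] numVals, int catOffset) {
    out[0] = w;                                   // intercept slot
    for (int slot : activeCatSlots)
      out[slot + 1] = w;                          // active one-hot categorical slots get w
    for (int i = 0; i < numVals.length; ++i)
      out[i + catOffset + 1] += numVals[i] * w;   // numeric slots accumulate value * w
    return out;
  }

  public static void main(String[] args) {
    double[] out = new double[6];                 // 1 intercept + 3 categorical slots + 2 numeric slots
    double[] row = scalarProduct(2.0, out, new int[]{1}, new double[]{0.5, -1.0}, 3);
    System.out.println(java.util.Arrays.toString(row)); // [2.0, 0.0, 2.0, 0.0, 1.0, -2.0]
  }
}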
2 changes: 1 addition & 1 deletion h2o-algos/src/main/java/hex/api/RegisterAlgos.java
@@ -2,7 +2,6 @@

import hex.ModelBuilder;
import hex.anovaglm.ANOVAGLM;
import hex.modelselection.ModelSelection;
import hex.psvm.PSVM;
import hex.tree.TreeHandler;
import water.api.AlgoAbstractRegister;
@@ -39,6 +38,7 @@ public void registerEndPoints(RestApiContext context) {
new hex.modelselection.ModelSelection (true),
new hex.isotonic .IsotonicRegression(true),
new hex.tree.dt .DT (true),
new hex.hglm .HGLM (true),
new hex.adaboost. AdaBoost (true)
};

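The RegisterAlgos change above is what exposes the new hex.hglm.HGLM builder alongside the other algorithms. A minimal sketch of training it through H2O's usual ModelBuilder pattern follows; it assumes HGLM follows the same conventions as the other builders registered here, and the HGLM-specific parameter fields (_group_column, _random_columns, _random_intercept) are guesses based on the commit messages, not names verified against the patch:

// A hedged sketch, assuming hex.hglm.HGLM / HGLMModel follow the standard H2O
// ModelBuilder pattern; the HGLM-specific parameter field names below are assumptions.
import hex.hglm.HGLM;
import hex.hglm.HGLMModel;
import water.fvec.Frame;

public class HGLMTrainSketch {
  static HGLMModel trainHGLM(Frame train) {
    HGLMModel.HGLMParameters parms = new HGLMModel.HGLMParameters();
    parms._train = train._key;                  // standard ModelBuilder parameter
    parms._response_column = "y";               // standard ModelBuilder parameter
    parms._group_column = "subject";            // assumed name of the grouping-column parameter
    parms._random_columns = new String[]{"x1"}; // assumed name/type for the random-effect columns
    parms._random_intercept = true;             // per the commit message about random_intercept
    return new HGLM(parms).trainModel().get();  // standard build-and-block pattern
  }
}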
9 changes: 6 additions & 3 deletions h2o-algos/src/main/java/hex/gam/GAM.java
@@ -46,8 +46,7 @@
import static hex.glm.GLMModel.GLMParameters.GLMType.gam;
import static hex.util.LinearAlgebraUtils.generateOrthogonalComplement;
import static hex.util.LinearAlgebraUtils.generateQR;
import static water.util.ArrayUtils.expandArray;
import static water.util.ArrayUtils.subtract;
import static water.util.ArrayUtils.*;


public class GAM extends ModelBuilder<GAMModel, GAMModel.GAMParameters, GAMModel.GAMModelOutput> {
@@ -68,7 +67,11 @@ public class GAM extends ModelBuilder<GAMModel, GAMModel.GAMParameters, GAMModel

@Override
public ModelCategory[] can_build() {
return new ModelCategory[]{ModelCategory.Regression};
return new ModelCategory[]{
ModelCategory.Regression,
ModelCategory.Binomial,
ModelCategory.Multinomial,
ModelCategory.Ordinal};
}

@Override
23 changes: 0 additions & 23 deletions h2o-algos/src/main/java/hex/gam/MatrixFrameUtils/GamUtils.java
@@ -139,29 +139,6 @@ else if (!name1ContainsResp && standarNContainsResp) // if name1 does not conta
return equalNames;
}

public static void copy2DArray(double[][] src_array, double[][] dest_array) {
int numRows = src_array.length;
for (int colIdx = 0; colIdx < numRows; colIdx++) { // save zMatrix for debugging purposes or later scoring on training dataset
System.arraycopy(src_array[colIdx], 0, dest_array[colIdx], 0,
src_array[colIdx].length);
}
}

// copy a square array
public static double[][] copy2DArray(double[][] src_array) {
double[][] dest_array = MemoryManager.malloc8d(src_array.length, src_array[0].length);
copy2DArray(src_array, dest_array);
return dest_array;
}

public static void copy2DArray(int[][] src_array, int[][] dest_array) {
int numRows = src_array.length;
for (int colIdx = 0; colIdx < numRows; colIdx++) { // save zMatrix for debugging purposes or later scoring on training dataset
System.arraycopy(src_array[colIdx], 0, dest_array[colIdx], 0,
src_array[colIdx].length);
}
}

public static void copyCVGLMtoGAMModel(GAMModel model, GLMModel glmModel, GAMParameters parms, String foldColumn) {
// copy over cross-validation metrics
model._output._cross_validation_metrics = glmModel._output._cross_validation_metrics;
84 changes: 1 addition & 83 deletions h2o-algos/src/main/java/hex/glm/ComputationState.java
@@ -16,7 +16,6 @@
import water.H2ORuntime;
import water.Job;
import water.MemoryManager;
import water.fvec.Frame;
import water.util.ArrayUtils;
import water.util.IcedHashMap;
import water.util.Log;
@@ -49,7 +48,6 @@ public final class ComputationState {
private boolean _dispersionEstimated;
boolean _allIn;
int _iter;
int _iterHGLM_GLMMME; // keep track of iterations used in estimating fixed/random coefficients
private double _lambda = 0;
private double _lambdaMax = Double.NaN;
private GLMGradientInfo _ginfo; // gradient info excluding l1 penalty
@@ -73,15 +71,6 @@ public final class ComputationState {
ConstraintsGram[] _gramLess = null;
private final GLM.BetaInfo _modelBetaInfo;
private double[] _beta; // vector of coefficients corresponding to active data
private double[] _ubeta; // HGLM, store coefficients of random effects;
private double[] _psi; // HGLM, psi
private double[] _phi; // HGLM, size random columns levels
private double _tau; // HGLM for ei
private double _correction_HL; // HGLM
double[] _sumEtaSquareConvergence; // HGLM: store sumEtaSquare, convergence
double[] _likelihoodInfo; // HGLM: stores 4 elements: hlik, pvh, pbvh, cAIC
public String[] _randCoeffNames; // store random coefficient names
private Frame _priorw_wpsi; // weight calculated for psi
final DataInfo _dinfo;
private GLMGradientSolver _gslvr;
private final Job _job;
@@ -105,11 +94,6 @@ public ComputationState(Job job, GLMParameters parms, DataInfo dinfo, BetaConstr
_nbetas = bi._nBetas;
_betaLengthPerClass = dinfo.fullN()+1;
_totalBetaLength = _betaLengthPerClass * _nbetas;
if (_parms._HGLM) {
_sumEtaSquareConvergence = new double[2];
if (_parms._calc_like)
_likelihoodInfo = new double[4];
}
_modelBetaInfo = bi;
}

@@ -225,10 +209,6 @@ void copyCheckModel2State(GLMModel model, int[][] _gamColIndices) {
}
}

public void set_sumEtaSquareConvergence(double[] sumInfo) {
_sumEtaSquareConvergence = sumInfo;
}

/***
* Copy GLM coefficients stored in beta to _beta of computationState
* @param beta coefficients to be copied from
@@ -247,61 +227,15 @@ public void set_beta_HGLM(double[] beta, int startIdx, int len, boolean intercep
System.arraycopy(beta, startIdx, _beta, 0, len);
}
}

public void set_likelihoodInfo(double hlik, double pvh, double pbvh, double cAIC) {
_likelihoodInfo[0] = hlik;
_likelihoodInfo[1] = pvh;
_likelihoodInfo[2] = pbvh;
_likelihoodInfo[3] = cAIC;
}

public void set_ubeta_HGLM(double[] ubeta, int startIdx, int len) {
if (_ubeta==null)
_ubeta = new double[len];
System.arraycopy(ubeta, startIdx, _ubeta, 0, len);
}


public void setZValues(double[] zValues, boolean dispersionEstimated) {
_zValues = zValues;
_dispersionEstimated = dispersionEstimated;
}

public double[] get_psi() {
return _psi;
}

public double get_correction_HL() {
return _correction_HL;
}

public double[] get_phi() {
return _phi;
}

public Frame get_priorw_wpsi() {
return _priorw_wpsi;
}

public double get_tau() {
return _tau;
}

public boolean getLambdaNull() { return _lambdaNull; }

public void set_tau(double tau) {
_tau=tau;
}

public void set_psi(double[] psi) {
assert _psi.length==psi.length:"Length of _psi and psi should be the same.";
System.arraycopy(psi, 0, _psi, 0, psi.length);
}

public void set_phi(double[] phi) {
assert _phi.length==phi.length:"Length of _phi and phi should be the same.";
System.arraycopy(phi, 0, _phi, 0, phi.length);
}

public GLMGradientSolver gslvr(){return _gslvr;}
public double lambda(){return _lambda;}
public double alpha() {return _alpha;}
@@ -339,9 +273,6 @@ public void setLambda(double lambda) {
return betaMultinomial(_activeClass,_beta);
return _beta;
}
public double[] ubeta(){
return _ubeta; // could be null. Be careful
}
public GLMGradientInfo ginfo(){return _ginfo == null?(_ginfo = gslvr().getGradient(beta())):_ginfo;}
public BetaConstraint activeBC(){return _activeBC;}
public double likelihood() {return _likelihood;}
@@ -1060,19 +991,6 @@ protected void setIter(int iteration) {
protected void setActiveDataMultinomialNull() { _activeDataMultinomial = null; }
protected void setActiveDataNull() { _activeData = null; }
protected void setLambdaSimple(double lambda) { _lambda=lambda; }

protected void setHGLMComputationState(double [] beta, double[] ubeta, double[] psi, double[] phi,
double hlcorrection, double tau, Frame wpsi, String[] randCoeffNames){
_beta = Arrays.copyOf(beta, beta.length);
_ubeta = Arrays.copyOf(ubeta, ubeta.length);
_randCoeffNames = Arrays.copyOf(randCoeffNames, randCoeffNames.length);
_psi = Arrays.copyOf(psi, psi.length);
_phi = Arrays.copyOf(phi, phi.length);
_correction_HL = hlcorrection;
_tau = tau;
_priorw_wpsi = wpsi; // store prior_weight and calculated wpsi value for coefficients of random columns
_iterHGLM_GLMMME = 0;
}

public double [] expandBeta(double [] beta) { // for multinomials
int fullCoefLen = (_dinfo.fullN() + 1) * _nbetas;
