import * as hhmmUtils from '../utils/hhmm-utils';
/**
* Hierarchical HMM decoder <br />
* Loads a model trained by the XMM library and processes an input stream of float vectors in real time.
* If the model was trained for regression, outputs an estimation of the associated process.
* @class
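* @example
* // Minimal usage sketch. `hhmmModel` stands for a Hierarchical HMM model
* // trained and exported (as JSON) by the XMM library; the observation vector
* // below is illustrative and must match the model's input dimension.
* const decoder = new HhmmDecoder(5);
* decoder.setModel(hhmmModel);
* decoder.filter([0.1, 0.2, 0.3], (err, res) => {
*   if (err === null) {
*     console.log(res.likeliest, res.likelihoods);
*   }
* });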
*/
class HhmmDecoder {
/**
* @param {Number} [windowSize=1] - Size of the likelihood smoothing window.
*/
constructor(windowSize = 1) {
/**
* Size of the likelihood smoothing window.
* @type {number}
* @private
*/
this._likelihoodWindow = windowSize;
/**
* The model, as generated by XMM from a training data set.
* @type {Object}
* @private
*/
this._model = undefined;
/**
* Intermediate decoding results, updated on each call to filter and passed to the results callback.
* @type {Object}
* @private
*/
this._modelResults = undefined;
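/**
* User-defined per-input-dimension weights, propagated to every Gaussian component of the model (see setWeights).
* @type {Array.<Number>}
* @private
*/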
this._weights = [];
}
/**
* Callback handling estimation results.
* @callback hhmmResultsCallback
* @param {string} err - Description of a potential error.
* @param {hhmmResults} res - Object holding the estimation results.
*/
/**
* Results of the filtering process.
* @typedef hhmmResults
* @type {Object}
* @name hhmmResults
* @property {String} likeliest - The likeliest model's label.
* @property {Number} likeliestIndex - The likeliest model's index.
* @property {Array.<Number>} likelihoods - The array of all models' smoothed normalized likelihoods.
* @property {Array.<Number>} timeProgressions - The array of all models' normalized time progressions.
* @property {Array.<Array.<Number>>} alphas - The array of all models' state likelihood arrays.
* @property {?Array.<Number>} outputValues - If the model was trained with regression, the estimated output float vector.
* @property {?Array.<Number>} outputCovariance - If the model was trained with regression, the output covariance (a flat array: full matrix or diagonal, depending on the covariance mode).
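* @example
* // Shape sketch of a results object for a two-class, non-regression model
* // (labels and values are illustrative):
* // {
* //   likeliest: 'gestureA',
* //   likeliestIndex: 0,
* //   likelihoods: [0.83, 0.17],
* //   timeProgressions: [0.42, 0.05],
* //   alphas: [[0.6, 0.3, 0.1], [0.2, 0.5, 0.3]],
* //   outputValues: [],
* //   outputCovariance: []
* // }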
*/
/**
* The decoding function.
* @param {Array.<Number>} observation - An input float vector to be estimated.
* @param {hhmmResultsCallback} [resultsCallback=null] - The callback handling the estimation results.
* @returns {hhmmResults}
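* @example
* // Sketch of a single decoding step. `decoder` is an HhmmDecoder instance
* // whose model has been set with setModel(), and `observation` is assumed
* // to have the model's input dimension.
* const res = decoder.filter(observation);
* if (res !== null) {
*   console.log(res.likeliestIndex, res.timeProgressions);
* }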
*/
filter(observation, resultsCallback = null) {
let err = null;
let res = null;
if(!this._model) {
err = 'no model loaded yet';
} else {
try {
hhmmUtils.hhmmFilter(observation, this._model, this._modelResults);
// create results object from relevant modelResults values:
const likeliest = (this._modelResults.likeliest > -1)
? this._model.models[this._modelResults.likeliest].label
: null;
const likelihoods = this._modelResults.smoothed_normalized_likelihoods.slice(0);
res = {
likeliest: likeliest,
likeliestIndex: this._modelResults.likeliest,
likelihoods: likelihoods,
timeProgressions: new Array(this._model.models.length),
alphas: new Array(this._model.models.length),
outputValues: [],
outputCovariance: [],
};
for (let i = 0; i < this._model.models.length; i++) {
res.timeProgressions[i] = this._modelResults.singleClassHmmModelResults[i].progress;
if (this._model.configuration.default_parameters.hierarchical) {
res.alphas[i]
= this._modelResults.singleClassHmmModelResults[i].alpha_h[0];
} else {
res.alphas[i]
= this._modelResults.singleClassHmmModelResults[i].alpha[0];
}
}
if (this._model.shared_parameters.bimodal) {
res.outputValues = this._modelResults.output_values.slice(0);
res.outputCovariance = this._modelResults.output_covariance.slice(0);
}
} catch (e) {
err = 'problem occurred during filtering: ' + e;
}
}
if (resultsCallback) {
resultsCallback(err, res);
}
return res;
}
/**
* Resets the intermediate results of the estimation (shortcut for reloading the model).
*/
reset() {
if (this._model) {
this._setModel(this._model);
}
}
//========================== GETTERS / SETTERS =============================//
/**
* Get the likelihood smoothing window size.
* @returns {Number}
*/
getLikelihoodWindow() {
return this._likelihoodWindow;
}
/**
* Set the likelihood smoothing window size.
* @param {Number} newWindowSize - the new window size.
*/
setLikelihoodWindow(newWindowSize) {
this._likelihoodWindow = newWindowSize;
this._updateLikelihoodWindow();
}
/** @private */
_updateLikelihoodWindow() {
if (this._model === undefined) return;
const res = this._modelResults.singleClassHmmModelResults;
for (let i = 0; i < this._model.models.length; i++) {
res[i].likelihood_buffer = new Array(this._likelihoodWindow);
for (let j = 0; j < this._likelihoodWindow; j++) {
res[i].likelihood_buffer[j] = 1 / this._likelihoodWindow;
}
}
}
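/**
* Set the per-input-dimension weights applied to the model's Gaussian components.
* Negative values are clamped to zero. If fewer weights than input dimensions are
* provided, the missing ones default to 1; extra weights are discarded.
* @param {Array.<Number>} newWeights - The new weights vector.
* @throws {Error} If newWeights is not an array.
* @example
* // e.g. emphasize the first input dimension and ignore the third (values illustrative):
* decoder.setWeights([1, 0.5, 0]);
*/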
setWeights(newWeights) {
if (!Array.isArray(newWeights)) {
throw new Error('Weights must be an array');
}
this._weights = newWeights;
this._updateWeights();
}
/** @private */
_updateWeights() {
if (this._model === undefined) return;
const m = this._model;
const params = m.shared_parameters;
const dimIn = params.bimodal ? params.dimension_input : params.dimension;
const w = this._weights.slice();
if (w.length < dimIn) {
const onesToAdd = dimIn - w.length;
for (let i = 0; i < onesToAdd; i++) {
w.push(1);
}
} else if (w.length > dimIn) {
// truncate to exactly dimIn weights
w.splice(dimIn);
}
for (let i = 0; i < w.length; i++) {
w[i] = Math.max(w[i], 0);
}
for (let i = 0; i < m.models.length; i++) {
for (let j = 0; j < m.models[i].states.length; j++) {
for (let k = 0; k < m.models[i].states[j].components.length; k++) {
m.models[i].states[j].components[k].weights = w;
}
}
}
}
/**
* A valid XMM Hierarchical HMM model
* @typedef xmmHhmmModel
* @type {Object}
* @name xmmHhmmModel
* @property {String} TODO - LIST REAL HHMM MODEL PROPERTIES HERE
*/
/**
* Get a deep copy of the current XMM Hierarchical HMM model.
* @returns {xmmHhmmModel|undefined} A copy of the model, or undefined if no model is loaded.
*/
getModel() {
if (this._model) {
return JSON.parse(JSON.stringify(this._model));
}
return undefined;
}
/**
* Set the XMM Hierarchical HMM model. A valid model must be set before the decoder can process observations.
* @param {xmmHhmmModel} model
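* @example
* // Sketch: `xmmModelJsonString` stands for the JSON export of a Hierarchical
* // HMM trained with the XMM library, obtained e.g. from a file or a request,
* // and `decoder` is an HhmmDecoder instance.
* const model = JSON.parse(xmmModelJsonString);
* decoder.setModel(model);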
*/
setModel(model) {
this._setModel(model);
}
/** @private */
_setModel(model) {
this._model = undefined;
this._modelResults = undefined;
if (!model) return;
// test if model is valid here (TODO: write a better test)
if (model.models !== undefined) {
this._model = model;
// adds user defined weights to the model (default [1, 1, ..., 1])
this._updateWeights();
const m = this._model;
const nmodels = m.models.length;
this._modelResults = {
instant_likelihoods: new Array(nmodels),
smoothed_log_likelihoods: new Array(nmodels),
smoothed_likelihoods: new Array(nmodels),
instant_normalized_likelihoods: new Array(nmodels),
smoothed_normalized_likelihoods: new Array(nmodels),
likeliest: -1,
frontier_v1: new Array(nmodels),
frontier_v2: new Array(nmodels),
forward_initialized: false,
singleClassHmmModelResults: []
};
const params = m.shared_parameters;
const dimOut = params.dimension - params.dimension_input;
this._modelResults.output_values = new Array(dimOut);
for (let i = 0; i < dimOut; i++) {
this._modelResults.output_values[i] = 0.0;
}
let outCovarSize;
if (m.configuration.default_parameters.covariance_mode == 0) { //---- full
outCovarSize = dimOut * dimOut;
} else { //------------------------------------------------------ diagonal
outCovarSize = dimOut;
}
this._modelResults.output_covariance = new Array(outCovarSize);
for (let i = 0; i < outCovarSize; i++) {
this._modelResults.output_covariance[i] = 0.0;
}
for (let i = 0; i < nmodels; i++) {
this._modelResults.instant_likelihoods[i] = 0;
this._modelResults.smoothed_log_likelihoods[i] = 0;
this._modelResults.smoothed_likelihoods[i] = 0;
this._modelResults.instant_normalized_likelihoods[i] = 0;
this._modelResults.smoothed_normalized_likelihoods[i] = 0;
const nstates = m.models[i].parameters.states;
const alpha_h = new Array(3);
for (let j = 0; j < 3; j++) {
alpha_h[j] = new Array(nstates);
for (let k = 0; k < nstates; k++) {
alpha_h[j][k] = 0;
}
}
const alpha = new Array(nstates);
for (let j = 0; j < nstates; j++) {
alpha[j] = 0;
}
const likelihood_buffer = new Array(this._likelihoodWindow);
for (let j = 0; j < this._likelihoodWindow; j++) {
likelihood_buffer[j] = 0.0;
}
const hmmRes = {
hierarchical: m.configuration.default_parameters.hierarchical,
instant_likelihood: 0,
log_likelihood: 0,
// for circular buffer implementation
// (see hmmUpdateResults) :
likelihood_buffer: likelihood_buffer,
likelihood_buffer_index: 0,
progress: 0,
exit_likelihood: 0,
exit_ratio: 0,
likeliest_state: -1,
// for non-hierarchical :
previous_alpha: alpha.slice(0),
alpha: alpha,
// for hierarchical :
alpha_h: alpha_h,
// prior: new Array(nstates),
// transition: new Array(nstates),
// used in hmmUpdateAlphaWindow
window_minindex: 0,
window_maxindex: 0,
window_normalization_constant: 0,
// for non-hierarchical mode
forward_initialized: false,
singleClassGmmModelResults: [] // aka states
};
hmmRes.output_values = this._modelResults.output_values.slice(0);
hmmRes.output_covariance = this._modelResults.output_covariance.slice(0);
// add HMM states (GMMs)
for (let j = 0; j < nstates; j++) {
const gmmRes = {
instant_likelihood: 0,
log_likelihood: 0
};
gmmRes.beta = new Array(this._model.models[i].parameters.gaussians);
for (let k = 0; k < gmmRes.beta.length; k++) {
gmmRes.beta[k] = 1 / gmmRes.beta.length;
}
gmmRes.output_values = hmmRes.output_values.slice(0);
gmmRes.output_covariance = hmmRes.output_covariance.slice(0);
hmmRes.singleClassGmmModelResults.push(gmmRes);
}
this._modelResults.singleClassHmmModelResults.push(hmmRes);
}
}
}
/**
* Get the currently estimated likeliest label.
* @returns {String} The likeliest label, or 'unknown' if no estimation has been performed yet.
*/
getLikeliestLabel() {
if (this._modelResults) {
if (this._modelResults.likeliest > -1) {
return this._model.models[this._modelResults.likeliest].label;
}
}
return 'unknown';
}
/**
* Get the total number of classes the model was trained with.
* @returns {Number}
*/
getNumberOfClasses() {
if (this._model) {
return this._model.models.length;
}
return 0;
}
/**
* Get the output dimension of the model (the size of a regression vector), or 0 if the model is not bimodal.
* @returns {Number}
*/
getRegressionVectorSize() {
if (this._model) {
const params = this._model.shared_parameters;
return params.bimodal
? params.dimension - params.dimension_input
: 0;
}
return 0;
}
}
export default HhmmDecoder;