Hidden Markov Models
export const fairDie = { 1: 1/6, 2: 1/6, 3: 1/6, 4: 1/6, 5: 1/6, 6: 1/6 };
export const loadedDie = { 1: 0.1, 2: 0.1, 3: 0.1, 4: 0.1, 5: 0.1, 6: 0.5 };
// Probability of switching dice between rolls: 0 = keep the current die, 1 = switch
export const switchProb = { 0: 0.95, 1: 0.05 };
// Draw a single outcome from a discrete distribution by walking the
// cumulative probability until it exceeds a uniform random number
export function sample (pdf) {
  const rand = Math.random();
  // Note: Object.keys returns the outcomes as strings
  const outcomes = Object.keys(pdf);
  let acc = 0;
  for (const outcome of outcomes) {
    const pCurrent = pdf[outcome];
    acc += pCurrent;
    if (rand < acc) {
      return outcome;
    }
  }
  // Guard against floating-point rounding: fall back to the last outcome
  return outcomes[outcomes.length - 1];
}
// Draw from pdf numIterations times and tally how often each outcome appears
export function sampleMany (pdf, numIterations) {
  const results = {};
  for (let i = 0; i < numIterations; i += 1) {
    const currentOutcome = sample(pdf);
    if (currentOutcome in results) {
      results[currentOutcome] += 1;
    } else {
      results[currentOutcome] = 1;
    }
  }
  return results;
}
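/// As a quick sanity check (a usage example added here, not part of the original Gist), tallying many samples from the loaded die should roughly recover its probabilities: about half of the counts land on 6.
// Hypothetical usage; exact counts vary from run to run,
// but expect roughly { 1: 1000, 2: 1000, 3: 1000, 4: 1000, 5: 1000, 6: 5000 }
sampleMany(loadedDie, 10000);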
/// **Decoding**
/// - **Given:** _**x**_, a sequence of rolls
/// - **Find:** _**π**_, the portion generated with a fair die and the portion generated with an unfair die
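/// The original cells state the decoding problem but do not solve it. Below is a minimal sketch (an addition, not from the original Gist) of the standard Viterbi algorithm, which finds the most likely parse by dynamic programming. It assumes the `{ states, a, e }` shape of the HMM exported from './hmm' below and works in log space to avoid underflow on long sequences.
import { HMM } from './hmm';
export function viterbi (hmm, x) {
  const { states, a, e } = hmm;
  // V[i][state] = log-probability of the best parse of x[0..i] that ends in `state`
  const V = [];
  const backpointers = [];
  for (let i = 0; i < x.length; i += 1) {
    V.push({});
    backpointers.push({});
    for (const state of states) {
      const logEmit = Math.log(e[state][x[i]]);
      if (i === 0) {
        V[0][state] = Math.log(a['start'][state]) + logEmit;
        backpointers[0][state] = null;
      } else {
        // Pick the best previous state to have transitioned from
        let bestPrev = states[0];
        let bestScore = -Infinity;
        for (const prev of states) {
          const score = V[i - 1][prev] + Math.log(a[prev][state]);
          if (score > bestScore) {
            bestScore = score;
            bestPrev = prev;
          }
        }
        V[i][state] = bestScore + logEmit;
        backpointers[i][state] = bestPrev;
      }
    }
  }
  // Trace back from the best final state to recover the full parse
  let currentState = states.reduce((best, s) => (V[x.length - 1][s] > V[x.length - 1][best] ? s : best));
  const path = [currentState];
  for (let i = x.length - 1; i > 0; i -= 1) {
    currentState = backpointers[i][currentState];
    path.unshift(currentState);
  }
  return path;
}
// A run dominated by sixes should mostly be decoded as 'loaded'
viterbi(HMM, [1, 6, 6, 5, 6, 2, 6, 6, 3, 6]);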
/// **Evaluation**
/// - **Given:** _**x**_, a sequence of rolls
/// - **Find:** **P(**_**x**_**)**, the likelihood of the sequence under the current model
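/// Evaluation is also not implemented in the original Gist. The sketch below (an addition) uses the standard forward algorithm: it sums the probability of all possible parses one observation at a time, so it computes P(x) without enumerating parses explicitly. For long sequences the values underflow and should be rescaled or kept as bignumbers, as the parseLikelihood cell below does.
import { HMM } from './hmm';
export function sequenceLikelihood (hmm, x) {
  const { states, a, e } = hmm;
  // forward[state] = P(x[0..i], q[i] = state), summed over all parses of x[0..i]
  let forward = {};
  for (const state of states) {
    forward[state] = a['start'][state] * e[state][x[0]];
  }
  for (let i = 1; i < x.length; i += 1) {
    const next = {};
    for (const state of states) {
      // Sum over every state we could have been in at step i - 1
      let total = 0;
      for (const prev of states) {
        total += forward[prev] * a[prev][state];
      }
      next[state] = total * e[state][x[i]];
    }
    forward = next;
  }
  // P(x) is the sum over all possible final states
  return states.reduce((sum, state) => sum + forward[state], 0);
}
sequenceLikelihood(HMM, [1, 2, 1, 5, 6, 2, 1, 5, 2, 4]);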
import { HMM } from './hmm';
import { sample } from './constants';
/// We can **generate a sequence of _N_ observations** from an HMM using forward sampling.
export function generateSequence (hmm, N) {
  const { alphabet, states, a, e } = hmm;
  const observations = [];
  let currentObservation;
  // Draw the starting state q[0] from the distribution a[start]
  let currentState = sample(a['start']);
  for (let i = 0; i < N; i += 1) {
    // Emit observation x[i] from the emission probabilities e[currentState]
    currentObservation = sample(e[currentState]);
    observations.push(currentObservation);
    // Transition to the next state from currentState
    currentState = sample(a[currentState]);
  }
  return observations;
}
generateSequence(HMM, 20);
import { fairDie, loadedDie } from './constants';
/// A **Hidden Markov Model** consists of the following parts:
/// **Alphabet of observations** b1 ... bM
const alphabet = [ 1, 2, 3, 4, 5, 6 ];
/// **Set of states** q1 ... qK
const states = ['fair', 'loaded'];
/// **Transition probabilities** between any two states
const a = {
  fair: { fair: 0.95, loaded: 0.05 },
  loaded: { fair: 0.05, loaded: 0.95 },
  start: { fair: 0.5, loaded: 0.5 },
};
/// **Emission probabilities** of each state, i.e. the probability of emitting each observation given that we are in that state
const e = {
  fair: fairDie,
  loaded: loadedDie,
};
export const HMM = { alphabet, states, a, e };
/// **Learning**
/// - **Given:** _**x**_, a sequence of rolls, and an HMM _without_ any parameters known
/// - **Find:** the parameters, i.e. how "loaded" the loaded die is, how "fair" the fair die is, and how often we switch dice
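/// Learning is not implemented in the original Gist either. As a sketch of the core idea (an addition): if the parse _were_ known, the maximum-likelihood parameters are just normalized counts of the observed transitions and emissions. Baum-Welch and Viterbi training repeat exactly this counting step, using parses (or expected counts) inferred from the current model instead of true labels.
export function estimateParameters (parse, x) {
  const transitionCounts = {};
  const emissionCounts = {};
  for (let i = 0; i < parse.length; i += 1) {
    const prevState = i > 0 ? parse[i - 1] : 'start';
    const [ currentState, currentObservation ] = [ parse[i], x[i] ];
    // Count the transition prevState -> currentState
    transitionCounts[prevState] = transitionCounts[prevState] || {};
    transitionCounts[prevState][currentState] = (transitionCounts[prevState][currentState] || 0) + 1;
    // Count the emission currentState -> currentObservation
    emissionCounts[currentState] = emissionCounts[currentState] || {};
    emissionCounts[currentState][currentObservation] = (emissionCounts[currentState][currentObservation] || 0) + 1;
  }
  // Turn each row of counts into a probability distribution
  const normalize = (counts) => {
    const total = Object.values(counts).reduce((sum, c) => sum + c, 0);
    const pdf = {};
    for (const key of Object.keys(counts)) {
      pdf[key] = counts[key] / total;
    }
    return pdf;
  };
  const a = {};
  for (const state of Object.keys(transitionCounts)) {
    a[state] = normalize(transitionCounts[state]);
  }
  const e = {};
  for (const state of Object.keys(emissionCounts)) {
    e[state] = normalize(emissionCounts[state]);
  }
  return { a, e };
}
// Estimated transition and emission probabilities from one hypothetical labelled sequence
estimateParameters(
  ['fair', 'fair', 'fair', 'loaded', 'loaded', 'loaded', 'loaded', 'fair', 'fair', 'fair'],
  [2, 5, 1, 6, 6, 3, 6, 4, 1, 2],
);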
import { HMM } from './hmm';
import math from 'mathjs';
/// We can compute the **joint probability of a sequence of observations and a parse**, P(_**x**_, _**π**_), using the Bayes net factorization of the HMM.
function parseLikelihood (hmm, parse, x) {
  const { a, e } = hmm;
  const terms = [];
  for (let i = 0; i < parse.length; i += 1) {
    const [ currentState, currentObservation ] = [ parse[i], x[i] ];
    // Transition probability into the current state
    const prevState = i > 0 ? parse[i - 1] : 'start';
    const pCurrentStateGivenPrevState = math.bignumber(a[prevState][currentState]);
    terms.push(pCurrentStateGivenPrevState);
    // Probability of emitting the i-th observation, given the current state
    const pObservationGivenState = math.bignumber(e[currentState][currentObservation]);
    terms.push(pObservationGivenState);
  }
  // Multiply all the terms; bignumbers avoid underflow on long sequences
  return terms.reduce((prev, curr) => math.multiply(prev, curr), math.bignumber(1)).toString();
}
// Example 1: all-fair parse, mixed rolls
const ex1 = [
  ['fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair'],
  [1, 2, 1, 5, 6, 2, 1, 5, 2, 4],
];
parseLikelihood(HMM, ...ex1);
// Example 2: all-loaded parse, same mixed rolls
const ex2 = [
  ['loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded'],
  [1, 2, 1, 5, 6, 2, 1, 5, 2, 4],
];
parseLikelihood(HMM, ...ex2);
// Example 3: all-loaded parse, six-heavy rolls
const ex3 = [
  ['loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded', 'loaded'],
  [1, 6, 6, 5, 6, 2, 6, 6, 3, 6],
];
parseLikelihood(HMM, ...ex3);
// Example 4: all-fair parse, same six-heavy rolls
const ex4 = [
  ['fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair', 'fair'],
  [1, 6, 6, 5, 6, 2, 6, 6, 3, 6],
];
parseLikelihood(HMM, ...ex4);
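/// To make the comparison explicit (a small helper added here, appended to the same cell since parseLikelihood is not exported): the ratio P(_x_, _π1_) / P(_x_, _π2_) tells us which of two parses the model prefers for the same rolls. For the six-heavy sequence, the all-loaded parse should come out well ahead of the all-fair one.
function compareParses (hmm, parse1, parse2, x) {
  const p1 = math.bignumber(parseLikelihood(hmm, parse1, x));
  const p2 = math.bignumber(parseLikelihood(hmm, parse2, x));
  // Ratio > 1 means the model prefers parse1
  return math.divide(p1, p2).toString();
}
compareParses(HMM, ex3[0], ex4[0], ex3[1]);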