Chris Aragao (cbaragao)
let
    EWMA = Function.From(
        type function (alpha as number, values as list, index as number) as number,
        (params) =>
            let
                alpha = params{0},
                values = params{1},
                index = params{2},
                // Accumulate EWMA values up to the specified index (assumed zero-based),
                // using the standard recursion s(t) = alpha * x(t) + (1 - alpha) * s(t - 1), seeded with the first value
                resultList = List.Accumulate(
                    List.Range(values, 1, index),
                    {List.First(values)},
                    (state, current) => state & {alpha * current + (1 - alpha) * List.Last(state)}
                )
            in
                List.Last(resultList)
    )
in
    EWMA
Cpl =
/*
=============EXAMPLE DATASET===================
CallID   Duration_Minutes   Agent     Date
C1001    4.2                Agent_A   2024-01-15
C1002    3.5                Agent_B   2024-01-15
C1003    2.8                Agent_C   2024-01-15
C1004    4.6                Agent_A   2024-01-15
C1005    3.9                Agent_B   2024-01-16
C1006    3.2                Agent_C   2024-01-16
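The gist preview stops at this sample dataset, so the measure body is not shown. As a rough sketch only, a lower capability index over a table shaped like the one above follows Cpl = (mean - LSL) / (3 * standard deviation); the table name CallData and the 2-minute lower specification limit are assumptions, not part of the original gist.

Cpl (sketch) =
VAR __LSL = 2    -- hypothetical lower specification limit, in minutes
VAR __Mean = AVERAGE ( CallData[Duration_Minutes] )
VAR __StdDev = STDEV.P ( CallData[Duration_Minutes] )
RETURN DIVIDE ( __Mean - __LSL, 3 * __StdDev )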
Cpu =
/*
====================EXAMPLE DATASET===========================================
LoanID   Processing_Days   Application_Date
L1001    18                2024-01-05
L1002    22                2024-01-08
L1003    16                2024-01-10
L1004    25                2024-01-12
L1005    19                2024-01-15
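This preview is likewise cut off before the measure body. A minimal upper capability index sketch over such a table would use Cpu = (USL - mean) / (3 * standard deviation); the table name LoanData and the 30-day upper specification limit are assumptions.

Cpu (sketch) =
VAR __USL = 30    -- hypothetical upper specification limit, in days
VAR __Mean = AVERAGE ( LoanData[Processing_Days] )
VAR __StdDev = STDEV.P ( LoanData[Processing_Days] )
RETURN DIVIDE ( __USL - __Mean, 3 * __StdDev )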
Cpk =
/*
===============EXAMPLE DATASET====================
MeasurementID   Thickness_mm   Date
1               10.12          2024-01-15
2               10.08          2024-01-15
3               10.25          2024-01-15
4               9.95           2024-01-15
5               10.15          2024-01-16
6               10.22          2024-01-16
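Again only the sample data survives the preview. A common Cpk sketch takes the smaller of the lower and upper capability indexes, Cpk = min(Cpl, Cpu); the table name Measurements and the specification limits of 9.90 mm and 10.30 mm are assumptions.

Cpk (sketch) =
VAR __LSL = 9.90    -- hypothetical lower specification limit, in mm
VAR __USL = 10.30   -- hypothetical upper specification limit, in mm
VAR __Mean = AVERAGE ( Measurements[Thickness_mm] )
VAR __StdDev = STDEV.P ( Measurements[Thickness_mm] )
VAR __Cpl = DIVIDE ( __Mean - __LSL, 3 * __StdDev )
VAR __Cpu = DIVIDE ( __USL - __Mean, 3 * __StdDev )
RETURN MIN ( __Cpl, __Cpu )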
3 Day Rolling Average 3 =
VAR __MaxOffset = MAX('Calendar'[CurrDayOffset])-1
VAR __MinOffset = MAX('Calendar'[CurrDayOffset])-3
VAR __PrevMaxOffset = MAX('Calendar'[CurrDayOffset])-2
VAR __PrevMinOffset = MAX('Calendar'[CurrDayOffset])-4
VAR __STDev = CALCULATE(STDEV.P(TimeSeriesData[Value]), ALL(TimeSeriesData))
VAR __Mean = CALCULATE(AVERAGE(TimeSeriesData[Value]), ALL(TimeSeriesData))
VAR __T1 =
    SUMMARIZE(
        FILTER(ALL('Calendar'), 'Calendar'[CurrDayOffset] >= __MinOffset && 'Calendar'[CurrDayOffset] <= __MaxOffset),
        "__Value", SUM(TimeSeriesData[Value])
    )
VAR __T2 =
    ADDCOLUMNS(
        __T1,
        "__Previous_Value",
        CALCULATE(
            SUM(TimeSeriesData[Value]),
            ALL('Calendar'),
            'Calendar'[CurrDayOffset] >= __PrevMinOffset && 'Calendar'[CurrDayOffset] <= __PrevMaxOffset
        )
    )
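The preview ends before this measure's RETURN, so its final expression is not shown. For comparison, a compact 3-day rolling average can also be written with DATESINPERIOD; this sketch assumes a 'Calendar'[Date] column with an active relationship to TimeSeriesData, which the offset-based approach above does not require.

3 Day Rolling Average (sketch) =
CALCULATE (
    AVERAGEX (
        VALUES ( 'Calendar'[Date] ),
        CALCULATE ( SUM ( TimeSeriesData[Value] ) )
    ),
    DATESINPERIOD ( 'Calendar'[Date], MAX ( 'Calendar'[Date] ), -3, DAY )
)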
// Make Table 1
T1 = DATATABLE("EntityID", INTEGER, "Feature", STRING, {
    { 1, "A" }, { 1, "B" }, { 1, "C" },
    { 2, "B" }, { 2, "C" }, { 2, "D" },
    { 3, "A" }, { 3, "E" }, { 3, "F" },
    { 4, "C" }, { 4, "D" }, { 4, "F" },
    { 5, "A" }, { 5, "B" }, { 5, "F" }
})
// Make Table 2
T2 = DATATABLE("EntityID", INTEGER, "Feature", STRING, {
    { 101, "B" }, { 101, "C" }, { 101, "E" },
    { 102, "A" }, { 102, "D" }, { 102, "F" },
    { 103, "C" }, { 103, "D" }, { 103, "G" },
    { 104, "A" }, { 104, "B" }, { 104, "G" },
    { 105, "E" }, { 105, "F" }, { 105, "G" }
})
DEFINE
    -- Sample data table for demonstration purposes
    -- Replace with your actual table in production use
    TABLE SampleData = DATATABLE(
        "ProductID", INTEGER,
        "Sales", CURRENCY,
        {
            {1, 100},
            {2, 150},
            {3, 200}
        }
    )
-- Preview the sample table
EVALUATE SampleData
Median of Means =
-- Assign each row to one of five groups by rank, average each group, then take the median of the group means
VAR __Partition = ADDCOLUMNS(SampleData, "__Group", MOD(RANKX(ALL(SampleData), SampleData[ID],, ASC), 5))
VAR __Means = SUMMARIZE(__Partition, [__Group], "__GroupMean", AVERAGE(SampleData[Value1]))
VAR __Result = MEDIANX(__Means, [__GroupMean])
RETURN __Result
PERT ESTIMATION =
-- Classic PERT weighted estimate: (Optimistic + 4 * Most Likely + Pessimistic) / 6
VAR __Optimistic = 100
VAR __Likely = 70
VAR __Pessimistic = 60
VAR __Result = (__Optimistic + (4 * __Likely) + __Pessimistic) / 6
RETURN __Result
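With these inputs the estimate works out to (100 + 4 * 70 + 60) / 6 = 440 / 6, roughly 73.3.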
cbaragao / NPEM.dax
RPEM =
/* RPEM (Risk-Adjusted Point Estimate Method)
📌 Purpose:
Estimate a risk-adjusted cost (or value) using the Rosenblueth Point Estimate Method (PEM), accounting for correlation between two uncertain variables and a user-defined risk tolerance.
🛠️ How to Use:
- Set low, moderate, and high values for two variables (A and B).
- Set the correlation between them (-1 to +1).
- Set a risk tolerance factor (e.g., 1.0 = 1 standard deviation).
- Set __IsCostIncreasing = TRUE() if higher values are worse (e.g., cost).
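The preview ends partway through these usage notes and does not show the measure body. The following is a rough sketch of a two-variable Rosenblueth PEM with a risk adjustment, not the original measure: the PERT-style mean and sigma estimation, the additive combination of A and B, and every input value below are assumptions for illustration.

RPEM (sketch) =
-- Hypothetical three-point inputs for variables A and B
VAR __A_Low = 40
VAR __A_Mod = 50
VAR __A_High = 70
VAR __B_Low = 20
VAR __B_Mod = 30
VAR __B_High = 45
VAR __Rho = 0.5              -- assumed correlation between A and B (-1 to +1)
VAR __RiskTolerance = 1.0    -- 1.0 = one standard deviation
VAR __IsCostIncreasing = TRUE ()
-- PERT-style moments for each variable (an assumed estimation step)
VAR __MeanA = ( __A_Low + 4 * __A_Mod + __A_High ) / 6
VAR __SigmaA = ( __A_High - __A_Low ) / 6
VAR __MeanB = ( __B_Low + 4 * __B_Mod + __B_High ) / 6
VAR __SigmaB = ( __B_High - __B_Low ) / 6
-- Rosenblueth two-variable points (mean +/- sigma) with correlation-adjusted weights
VAR __Wpp = ( 1 + __Rho ) / 4
VAR __Wmm = ( 1 + __Rho ) / 4
VAR __Wpm = ( 1 - __Rho ) / 4
VAR __Wmp = ( 1 - __Rho ) / 4
VAR __Fpp = ( __MeanA + __SigmaA ) + ( __MeanB + __SigmaB )
VAR __Fmm = ( __MeanA - __SigmaA ) + ( __MeanB - __SigmaB )
VAR __Fpm = ( __MeanA + __SigmaA ) + ( __MeanB - __SigmaB )
VAR __Fmp = ( __MeanA - __SigmaA ) + ( __MeanB + __SigmaB )
-- Weighted first and second moments of the combined quantity A + B
VAR __E = __Wpp * __Fpp + __Wmm * __Fmm + __Wpm * __Fpm + __Wmp * __Fmp
VAR __E2 = __Wpp * __Fpp ^ 2 + __Wmm * __Fmm ^ 2 + __Wpm * __Fpm ^ 2 + __Wmp * __Fmp ^ 2
VAR __Sigma = SQRT ( __E2 - __E ^ 2 )
-- Risk adjustment: shift upward when higher values are worse, downward otherwise
VAR __Result = IF ( __IsCostIncreasing, __E + __RiskTolerance * __Sigma, __E - __RiskTolerance * __Sigma )
RETURN __Result

The (1 + rho)/4 and (1 - rho)/4 weights are the standard Rosenblueth weights for two correlated variables, so positive correlation shifts probability mass toward the points where both variables are high or both are low.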