-- In-SQL Rules Engine Demo
---------------------------------------------------------
-- Boilerplate mocking out what we already have today. --
-- Just scroll down to the bottom.                     --
---------------------------------------------------------

-- Cleanup
drop table if exists
    import_log,
    my_data_source,
    meta_record,
    my_campaign;
drop function if exists
    insert_meta_record (text)
;

-- Our "data source" table
create table my_data_source (
    cust_id bigint,
    name    text,
    dob     date,
    market  text,
    nps     int
);

-- Some test data
insert into my_data_source (cust_id, name, dob, market, nps) values
    (123,  'Adam',  '11/22/93'::date, 'Minnesota', 8),
    (456,  'Benny', '4/5/86'::date,   'Wisconsin', 3),
    (789,  'Chris', '7/18/89'::date,  'Minnesota', 6),
    (1011, 'Dave',  '3/23/91'::date,  'Minnesota', 1),
    (1213, 'Eric',  '1/1/81'::date,   'Illinois',  7)
;

create table meta_record (
    id            bigserial primary key,
    campaign_name text,
    created_dt    timestamp,
    modified_dt   timestamp,
    assignee      text,
    priority      int default 0
);
-- Simplified version of the existing stored procedure that creates a meta-record for each campaign record
create function insert_meta_record (
    campaign_name_ text
)
returns bigint
language plpgsql
as $$
declare
    now_ timestamp = now();
    id_  bigint;
begin
    insert into meta_record (campaign_name, created_dt, modified_dt)
    values (campaign_name_, now_, now_)
    returning id into id_;
    return id_;
end;
$$;
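-- A quick, commented-out sanity check (the campaign name here is made up and not part
-- of the demo): calling the function returns the id of the freshly inserted meta_record row.
--   select insert_meta_record('smoke_test');
--   select * from meta_record where campaign_name = 'smoke_test';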
create table my_campaign (
    meta_record_id bigint references meta_record(id),
    cust_id        bigint,
    name           text,
    dob            date,
    nps            int
);

------------------------------------
-- Now for the interesting stuff! --
------------------------------------
-- New table to store a per-row log of the actions taken during import
create table import_log (
    campaign_name text,
    process_time  timestamp default now(),
    actions       jsonb,
    row           jsonb
);
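-- If the log grows large, the jsonb "actions" column could be indexed to speed up
-- containment (@>) queries. A minimal, optional sketch (index name is arbitrary;
-- not required by the demo):
create index if not exists import_log_actions_idx on import_log using gin (actions);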
do $$ -- This is a raw PL/pgSQL block ("Procedural Language/PostgreSQL").
declare -- The DECLARE section introduces variables; their scope is the enclosing BEGIN ... END block.
    r my_data_source;
    inserted_id bigint;
    actions jsonb;
    skip_record boolean;
begin
    -- Loop over every record in the data source.
    -- We don't use a single INSERT/SELECT statement because we need to log things as a
    -- side effect. (For contrast, a set-based sketch of the skip rule follows this block.)
    for r in select * from my_data_source
    loop
        skip_record = false;
        actions = '[]'::jsonb; -- Log of all the rules we process

        -- Rules which cause us to skip a record
        if r.market != 'Minnesota' then
            skip_record = true;
            actions = jsonb_insert(actions, '{-1}', '{"skip": true}', true); -- append action to the rules log
            -- We might also want to record which rule was being processed
        end if;

        if not(skip_record) then
            insert into my_campaign (
                meta_record_id,
                cust_id, name, dob, nps
            ) values (
                insert_meta_record('my_campaign'),
                r.cust_id, r.name, r.dob, r.nps
            )
            returning meta_record_id into inserted_id;

            -- Process additional rules in order
            if r.nps <= 6 then
                update meta_record
                set assignee = 'Jakub'
                where id = inserted_id;
                actions = jsonb_insert(actions, '{-1}', jsonb_build_object('assignee', 'Jakub'), true);
            end if;
            if r.nps <= 3 then
                update meta_record
                set priority = 9
                where id = inserted_id;
                actions = jsonb_insert(actions, '{-1}', jsonb_build_object('priority', 9), true);
            end if;
        end if;

        insert into import_log (campaign_name, process_time, actions, row)
        values ('my_campaign', now(), actions, row_to_json(r.*));
    end loop;
end;
$$;
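-- For contrast: if we only had rules that filter records (no per-row logging, no follow-up
-- updates), the import could be a single set-based statement, roughly like this
-- commented-out sketch:
--   insert into my_campaign (meta_record_id, cust_id, name, dob, nps)
--   select insert_meta_record('my_campaign'), cust_id, name, dob, nps
--   from my_data_source
--   where market = 'Minnesota';
-- The assignee/priority rules and the import_log entries are what push us into PL/pgSQL here.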
-- These are the records that got inserted:
select * from meta_record full join my_campaign on my_campaign.meta_record_id = meta_record.id;
-- id  campaign_name  created_dt                  modified_dt                 assignee  priority  cust_id  name   dob         nps
-- 1   my_campaign    2017-09-13 15:52:00.123611  2017-09-13 15:52:00.123611            0         123      Adam   1993-11-22  8
-- 2   my_campaign    2017-09-13 15:52:00.123611  2017-09-13 15:52:00.123611  Jakub     0         789      Chris  1989-07-18  6
-- 3   my_campaign    2017-09-13 15:52:00.123611  2017-09-13 15:52:00.123611  Jakub     9         1011     Dave   1991-03-23  1
-- And we also have a log!
select * from import_log;
-- campaign_name  process_time                actions                                   row
-- my_campaign    2017-09-13 15:52:00.123611  []                                        {"dob": "1993-11-22", "nps": 8, "name": "Adam", "market": "Minnesota", "cust_id": 123}
-- my_campaign    2017-09-13 15:52:00.123611  [{"skip": true}]                          {"dob": "1986-04-05", "nps": 3, "name": "Benny", "market": "Wisconsin", "cust_id": 456}
-- my_campaign    2017-09-13 15:52:00.123611  [{"assignee": "Jakub"}]                   {"dob": "1989-07-18", "nps": 6, "name": "Chris", "market": "Minnesota", "cust_id": 789}
-- my_campaign    2017-09-13 15:52:00.123611  [{"assignee": "Jakub"}, {"priority": 9}]  {"dob": "1991-03-23", "nps": 1, "name": "Dave", "market": "Minnesota", "cust_id": 1011}
-- my_campaign    2017-09-13 15:52:00.123611  [{"skip": true}]                          {"dob": "1981-01-01", "nps": 7, "name": "Eric", "market": "Illinois", "cust_id": 1213}
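-- Because actions is jsonb, the log can be queried structurally. For example, a
-- containment (@>) filter pulls out just the rows that were skipped:
select i.row ->> 'name' as name, i.actions
from import_log i
where i.actions @> '[{"skip": true}]';
-- name   actions
-- Benny  [{"skip": true}]
-- Eric   [{"skip": true}]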
-- Every record has the same log time because now() returns the start time of the current
-- transaction, and the entire DO block runs inside a single transaction. Use
-- clock_timestamp() instead if you want a value that advances while the transaction runs.
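-- A tiny demonstration of that difference (uses pg_sleep, so it pauses for a second):
do $$
begin
    raise notice 'now(): %   clock_timestamp(): %', now(), clock_timestamp();
    perform pg_sleep(1);
    -- now() is unchanged below; clock_timestamp() has advanced by about a second.
    raise notice 'now(): %   clock_timestamp(): %', now(), clock_timestamp();
end;
$$;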