Last active
July 5, 2024 10:04
-
-
Save arshpreetsingh/fa1987e97910d43924b934bf5919fc36 to your computer and use it in GitHub Desktop.
Complex SQL queries
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
-- Launch the benchmark suite in a background worker, wait 15 minutes,
-- then launch a second run.
-- NOTE(review): the pg_background extension's documented entry point is
-- pg_background_launch(sql text); confirm pg_background.background(name,
-- args) actually exists in this installation before relying on it.
SELECT pg_background.background('run_performance_tests', '(10)');
SELECT pg_sleep(900); -- 900 seconds = 15 minutes
SELECT pg_background.background('run_performance_tests', '(10)');
-- Benchmark driver: repeats the full create/load/query/drop cycle a
-- caller-chosen number of times. The create/load/query steps are
-- placeholders to be pasted in; only the cleanup and progress log run.
CREATE OR REPLACE FUNCTION run_performance_tests(iterations INT)
RETURNS VOID AS $$
DECLARE
    iter INT;  -- current pass number, used only for progress logging
BEGIN
    FOR iter IN 1..iterations LOOP
        -- Create Tables
        -- (Table creation statements here, same as you provided)
        -- Insert Data
        -- (Data insertion statements here, same as you provided)
        -- Heavy Operations
        -- (All your complex queries and operations here)
        -- Clean Up (Drop Tables)
        DROP TABLE IF EXISTS simple_table, indexed_table, time_series_data, wide_table;
        RAISE NOTICE 'Iteration % completed', iter;  -- per-pass progress log
    END LOOP;
END $$ LANGUAGE plpgsql;
-- Simple Table: baseline case — narrow rows, no secondary indexes.
-- IF NOT EXISTS keeps the script re-runnable: the harness drops these
-- tables at the end of each pass, so a run that aborted mid-pass would
-- otherwise make the next plain CREATE fail.
CREATE TABLE IF NOT EXISTS simple_table (
    id SERIAL PRIMARY KEY,
    name VARCHAR(255),
    value INTEGER
);
-- Table with Indexes: measures index-maintenance cost on writes and
-- index-scan benefit on reads.
-- IF NOT EXISTS (on the table and both indexes) keeps the script
-- re-runnable after a pass that aborted before its DROP step.
CREATE TABLE IF NOT EXISTS indexed_table (
    id SERIAL PRIMARY KEY,
    name VARCHAR(255),
    category VARCHAR(50),
    value INTEGER,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_indexed_table_name ON indexed_table (name);
CREATE INDEX IF NOT EXISTS idx_indexed_table_category ON indexed_table (category);
-- Time-Series Data: composite PK gives one row per (instant, sensor).
-- IF NOT EXISTS keeps the script re-runnable after an aborted pass.
-- NOTE(review): the column name "timestamp" shadows the SQL type name;
-- it works unquoted in PostgreSQL but a rename (e.g. recorded_at) would
-- be clearer — left as-is since queries below reference it.
CREATE TABLE IF NOT EXISTS time_series_data (
    timestamp TIMESTAMP NOT NULL,
    sensor_id INTEGER,
    value DOUBLE PRECISION,
    PRIMARY KEY (timestamp, sensor_id)
);
-- Wide Table with Many Columns (example with 5 mixed-type columns,
-- including JSONB for the document-query tests below).
-- IF NOT EXISTS keeps the script re-runnable after an aborted pass.
CREATE TABLE IF NOT EXISTS wide_table (
    id SERIAL PRIMARY KEY,
    col1 VARCHAR(255),
    col2 INTEGER,
    col3 TIMESTAMP,
    col4 BOOLEAN,
    col5 JSONB
);
-- Simple Table: 10,000 rows of filler data.
-- Replaced the row-by-row plpgsql DO-loop with one set-based
-- INSERT ... SELECT over generate_series: a single statement, far fewer
-- executor round trips, identical resulting data.
INSERT INTO simple_table (name, value)
SELECT
    'Name ' || i,
    floor(random() * 1000)   -- 0..999; assignment cast to INTEGER
FROM generate_series(1, 10000) AS i;
-- Indexed Table: 100,000 rows with a random category, value, and a
-- created_at spread uniformly over the past year.
-- Replaced the plpgsql DO-loop with one set-based INSERT ... SELECT.
-- floor(random() * 4 + 1) yields 1..4; the explicit ::int cast keeps
-- the array subscript unambiguously integer.
INSERT INTO indexed_table (name, category, value, created_at)
SELECT
    'Product ' || i,
    (ARRAY['Electronics', 'Clothing', 'Home', 'Books'])[floor(random() * 4 + 1)::int],
    floor(random() * 1000),
    NOW() - random() * interval '1 year'
FROM generate_series(1, 100000) AS i;
-- Wide Table: 1,000 rows of mixed-type filler.
-- Fix: the original loop inserted explicit ids into the SERIAL column,
-- which leaves wide_table_id_seq at its start value — the next
-- default-valued insert would then raise a duplicate-key error.
-- Letting the sequence assign ids produces the same 1..1000 values on a
-- fresh table and keeps the sequence in sync. Also rewritten set-based.
INSERT INTO wide_table (col1, col2, col3, col4, col5)
SELECT
    md5(random()::text),
    floor(random() * 100000),
    NOW() - random() * interval '10 years',
    random() > 0.5,                -- random boolean
    jsonb_build_object('key1', random() * 100, 'key2', md5(random()::text))
FROM generate_series(1, 1000) AS i;
-- Complex Joins: self-join on a non-unique column — every ordered pair
-- of distinct rows sharing a category, so the result grows
-- quadratically per category (deliberately heavy).
-- Fix: both output columns were named "name"; aliased so the result
-- set is unambiguous.
SELECT
    a.name AS name_a,
    b.name AS name_b
FROM indexed_table AS a
INNER JOIN indexed_table AS b
    ON a.category = b.category
WHERE a.id <> b.id;
-- Subqueries & Window Functions: rows whose value beats the table-wide
-- average, annotated with the per-category average via a window.
-- (Table-wide average hoisted into a named single-row CTE instead of a
-- scalar subquery in WHERE — same rows, same columns.)
WITH overall AS (
    SELECT AVG(value) AS avg_value
    FROM indexed_table
)
SELECT
    t.name,
    t.value,
    AVG(t.value) OVER (PARTITION BY t.category) AS avg_value_by_category
FROM indexed_table AS t
CROSS JOIN overall
WHERE t.value > overall.avg_value;
-- Recursive CTE: walk a parent_id hierarchy.
-- Fix: the original fragment was truncated — the WITH RECURSIVE header
-- and the anchor member were missing, leaving a dangling ')' and an
-- unreadable query. Reconstructed in the standard anchor UNION ALL
-- recursive-member form.
-- NOTE(review): indexed_table as created above has no parent_id column;
-- confirm the intended table/schema before running this statement.
WITH RECURSIVE category_tree AS (
    -- anchor: top-level rows (no parent)
    SELECT id, name, parent_id
    FROM indexed_table
    WHERE parent_id IS NULL
    UNION ALL
    -- recursive member: children of rows already in the tree
    SELECT c.id, c.name, c.parent_id
    FROM indexed_table c
    INNER JOIN category_tree ct ON c.parent_id = ct.id
)
SELECT * FROM category_tree;
-- Time-Series Aggregations: one summary row per sensor (mean, max, min
-- of all readings) — exercises a full-table GROUP BY.
SELECT
    sensor_id,
    AVG(value) AS avg_value,
    MAX(value) AS max_value,
    MIN(value) AS min_value
FROM time_series_data
GROUP BY sensor_id;
-- Time-Series Moving Average: 10-point trailing mean per sensor — the
-- current reading plus the nine before it in time order. The explicit
-- ROWS frame avoids the default RANGE frame's tie behavior.
SELECT
    timestamp,
    sensor_id,
    AVG(value) OVER (
        PARTITION BY sensor_id
        ORDER BY timestamp
        ROWS BETWEEN 9 PRECEDING AND CURRENT ROW
    ) AS moving_avg
FROM time_series_data;
-- JSONB Query: extract 'key1' as text while filtering on 'key2'.
-- The leading '%' in the LIKE pattern defeats any index and forces a
-- full scan — deliberate here, since this is a load-generation query.
SELECT
    id,
    col5 ->> 'key1' AS key1_value
FROM wide_table
WHERE col5 ->> 'key2' LIKE '%abc%';
-- Bulk Update on Indexed Column: touches every 'Electronics' row,
-- forcing maintenance of idx_indexed_table_category entries and a
-- rewrite of a large fraction of the table.
-- value is INTEGER, so value * 1.1 was being rounded by an implicit
-- assignment cast; ROUND() makes that explicit (identical results —
-- both round halves away from zero).
UPDATE indexed_table
SET value = ROUND(value * 1.1)  -- 10% increase, rounded to nearest int
WHERE category = 'Electronics';
-- Bulk Inserts: 100,000 additional 'Electronics' rows in a single
-- set-based statement driven by generate_series (ids 100001..200000
-- feed the product names; created_at spread over the past year).
INSERT INTO indexed_table (name, category, value, created_at)
SELECT
    'New Product ' || s.i,
    'Electronics',
    floor(random() * 1000),
    NOW() - random() * interval '1 year'
FROM generate_series(100001, 200000) AS s(i);
-- Full Table Scan: unfiltered COUNT(*) — sequential-scan probe over
-- the wide table.
SELECT COUNT(*) FROM wide_table;
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment