These queries were used in the video tutorial at https://www.youtube.com/watch?v=bYlcnkgH0zA&ab_channel=QuestDB.
All the queries can be run at https://demo.questdb.io.
Just create a subfolder for your test, with a subfolder named volume and another subfolder inside volume named plugins.
Unzip the plugin zip directly into the plugins folder. A subfolder with the plugin name should appear.
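If you prefer to script those two steps, a minimal Python sketch could look like the block below; the archive name questdb-datasource.zip is only a placeholder for whichever plugin zip you downloaded.

```python
# Minimal sketch of the folder layout described above (run it inside your test folder).
# "questdb-datasource.zip" is a placeholder name; use the plugin zip you actually downloaded.
from pathlib import Path
import zipfile

plugins_dir = Path("volume/plugins")
plugins_dir.mkdir(parents=True, exist_ok=True)   # creates ./volume and ./volume/plugins

with zipfile.ZipFile("questdb-datasource.zip") as archive:
    archive.extractall(plugins_dir)              # the plugin's own subfolder ends up in plugins/
```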
Now start Grafana with:

```bash
docker run --rm -p 3000:3000 --name=grafana-plugin --user "$(id -u)" --volume "$PWD/volume:/var/lib/grafana" grafana/grafana-oss
```

```sql
CREATE TABLE CameraDetails (
    Manufacturer SYMBOL,
    Model SYMBOL,
    Orientation SYMBOL,
    Software STRING,
    DateAndTime TIMESTAMP,
    YCbCrPositioning SYMBOL,
    Compression SYMBOL,
    XResolution LONG,
    YResolution LONG
);
```
```bash
#!/bin/bash
# This script needs both curl and jq installed.
# It goes over all the tables with daily partitioning and removes all partitions older
# than 5000 days (change that value once you are sure it works as you expect).
# It uses jq to parse the JSON output from the REST API, extracting the "dataset"
# element and flattening all the rows.
# Then it reads line by line and calls the QuestDB API with each ALTER TABLE statement.

# Get all the tables with daily partitioning and compose the ALTER TABLE statements
```
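A rough sketch of the same workflow in Python (using the requests library instead of curl and jq) might look like the block below. It assumes QuestDB's REST /exec endpoint on localhost:9000 and that the tables() metadata function exposes table_name, designatedTimestamp and partitionBy columns, which can differ between QuestDB versions.

```python
# Rough Python sketch of the workflow described above (requests instead of curl/jq).
# Assumptions: QuestDB's REST /exec endpoint on localhost:9000, and a tables()
# metadata function exposing table_name, designatedTimestamp and partitionBy columns.
import requests

QUESTDB_EXEC = "http://localhost:9000/exec"
MAX_AGE_DAYS = 5000  # change this once you are sure the script behaves as expected

def run(sql):
    # Send one statement to QuestDB and return the parsed JSON response
    response = requests.get(QUESTDB_EXEC, params={"query": sql})
    response.raise_for_status()
    return response.json()

# All tables partitioned by day, together with their designated timestamp column
daily_tables = run(
    "SELECT table_name, designatedTimestamp FROM tables() WHERE partitionBy = 'DAY'"
)["dataset"]

for table_name, ts_column in daily_tables:
    # Compose and run one ALTER TABLE ... DROP PARTITION statement per table
    alter = (f"ALTER TABLE {table_name} DROP PARTITION "
             f"WHERE {ts_column} < dateadd('d', -{MAX_AGE_DAYS}, now())")
    print(alter)
    run(alter)
```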
```python
# Configuring ODBC: https://solutions.posit.co/connections/db/best-practices/drivers/
# Using ODBC from Python: https://github.com/mkleehammer/pyodbc/wiki/Getting-started
import pyodbc

# Connect to QuestDB over the PostgreSQL wire protocol (port 8812, default credentials)
con = pyodbc.connect(
    driver='PostgreSQL Driver',
    database='qdb',
    server='localhost',
    port=8812,
    uid='admin',
    pwd='quest',
)
```
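Once connected, a minimal usage sketch could look like this; it queries QuestDB's built-in long_sequence() generator, so it does not rely on any particular table existing.

```python
# Minimal usage sketch for the connection above
cursor = con.cursor()
cursor.execute("SELECT x FROM long_sequence(5)")  # built-in row generator, no table required
for row in cursor.fetchall():
    print(row)
cursor.close()
con.close()
```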
```php
// Current time as nanoseconds since the Unix epoch, kept as a string via bcmath
// (number_format preserves microtime's microsecond precision before multiplying)
$nanoseconds = bcmul(number_format(microtime(true), 6, '.', ''), '1000000000');
echo $nanoseconds;
```
```python
import requests
import csv

def download_query(query):
    with open("output.csv", "w") as outfile:
        writer = csv.writer(outfile)
        last_line = None
        page = 0
        while last_line != 0:
            row_from = page * 1000000
            # Sketch of the rest of the loop: assumes the demo's /exec endpoint and QuestDB's LIMIT lo,hi paging
            url = "https://demo.questdb.io/exec"
            response = requests.get(url, params={"query": f"{query} LIMIT {row_from},{row_from + 1000000}"}).json()
            rows = response.get("dataset", [])
            if page == 0:  # write the CSV header once, using the column metadata
                writer.writerow(col["name"] for col in response["columns"])
            writer.writerows(rows)
            last_line = len(rows)  # an empty page means we are done
            page += 1
```
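A call like the one below would exercise the paging loop; long_sequence() generates rows on the fly, so no particular table needs to exist on the demo instance.

```python
# Roughly three million generated rows, forcing several pages of one million rows each
download_query("SELECT x FROM long_sequence(3000000)")
```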
```sql
CREATE TABLE sensors (ID LONG, make STRING, city STRING);

INSERT INTO sensors
    SELECT
        x ID, -- increasing integer
        rnd_str('Eberle', 'Honeywell', 'Omron', 'United Automation', 'RS Pro') make,
        rnd_str('New York', 'Miami', 'Boston', 'Chicago', 'San Francisco') city
    FROM long_sequence(10000) x;
```