"""Smoke test for the Apache Superset REST API: log in, run SQL via SQL Lab, and create a chart."""
import json
import sys

import requests

SUPERSET_URL = "http://localhost:8088"
USERNAME = "admin"
PASSWORD = "admin"

# One shared session so the login cookie and CSRF token stay paired across requests.
session = requests.Session()


def get_access_token():
    """Log in against the Superset security API and return a JWT access token."""
    url = f"{SUPERSET_URL}/api/v1/security/login"
    payload = {
        "username": USERNAME,
        "password": PASSWORD,
        "provider": "db",
        "refresh": True,
    }
    try:
        resp = session.post(url, json=payload, timeout=10)
        resp.raise_for_status()
        return resp.json()["access_token"]
    except requests.ConnectionError:
        print("❌ Error: cannot connect to Superset. Is Superset running on http://localhost:8088?")
        sys.exit(1)
    except requests.Timeout:
        print("❌ Error: timeout while connecting to Superset")
        sys.exit(1)
    except requests.HTTPError as e:
        print(f"❌ HTTP error during login: {e}")
        print(f"Response: {resp.text}")
        sys.exit(1)
    except KeyError:
        print("❌ Error: unexpected response from the server")
        print(f"Response: {resp.text}")
        sys.exit(1)
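

# The login payload above sets "refresh": True, so the response also carries a
# refresh token. A minimal sketch of how it could be renewed, assuming the
# Flask-AppBuilder endpoint POST /api/v1/security/refresh, which expects the
# *refresh* token as the Bearer credential; this helper is an illustration and
# not part of the original script.
def refresh_access_token(refresh_token):
    """Exchange a refresh token for a fresh access token (illustrative sketch)."""
    url = f"{SUPERSET_URL}/api/v1/security/refresh"
    resp = session.post(url, headers={"Authorization": f"Bearer {refresh_token}"}, timeout=10)
    resp.raise_for_status()
    return resp.json()["access_token"]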


def get_csrf_token(token):
    """Fetch the CSRF token required for state-changing calls such as SQL execution."""
    url = f"{SUPERSET_URL}/api/v1/security/csrf_token/"
    headers = {
        "Authorization": f"Bearer {token}",
    }
    resp = session.get(url, headers=headers)
    resp.raise_for_status()
    return resp.json()["result"]
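

# The CSRF token is only valid together with the session cookie set during the
# call above, which is why every function reuses the module-level session. A
# small convenience sketch (not part of the original gist) that bundles the
# headers the write calls below assemble by hand:
def auth_headers(token, csrf_token=None):
    """Build the common headers for authenticated Superset API calls (illustrative helper)."""
    headers = {"Authorization": f"Bearer {token}"}
    if csrf_token is not None:
        headers["Content-Type"] = "application/json"
        headers["X-CSRFToken"] = csrf_token
    return headers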


def get_databases(token):
    """List the databases available in this Superset instance."""
    url = f"{SUPERSET_URL}/api/v1/database/"
    headers = {
        "Authorization": f"Bearer {token}",
    }
    resp = session.get(url, headers=headers)
    resp.raise_for_status()
    return resp.json()["result"]


def get_datasets(token):
    """List the datasets available in this Superset instance."""
    url = f"{SUPERSET_URL}/api/v1/dataset/"
    headers = {
        "Authorization": f"Bearer {token}",
    }
    resp = session.get(url, headers=headers)
    resp.raise_for_status()
    return resp.json()["result"]
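

# The main block below simply grabs datasets[0], which is fragile as soon as
# more than one dataset exists. A hedged client-side lookup sketch, assuming
# the first page of results is enough; the helper name is ours, not part of
# the original gist:
def find_dataset_by_name(token, table_name):
    """Return the first dataset whose table_name matches, or None (illustrative helper)."""
    for ds in get_datasets(token):
        if ds["table_name"] == table_name:
            return ds
    return None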


def execute_sql(token, csrf_token, sql: str, database_id: int):
    """Run a SQL statement through the SQL Lab execute endpoint and return the JSON result."""
    url = f"{SUPERSET_URL}/api/v1/sqllab/execute/"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "X-CSRFToken": csrf_token,
    }
    payload = {
        "database_id": database_id,
        "sql": sql,
        "schema": "public",
    }
    resp = session.post(url, json=payload, headers=headers)
    try:
        resp.raise_for_status()
    except requests.HTTPError:
        # Dump the full request/response context so API errors are debuggable.
        print(f"HTTP Error: {resp.status_code}")
        print(f"Request URL: {resp.url}")
        print(f"Request Headers: {headers}")
        print(f"Request Payload: {payload}")
        print(f"Response: {resp.text}")
        raise
    return resp.json()
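

# For synchronous execution the response JSON carries the result rows under
# "data" (the main block below prints result.get("data") directly). A hedged
# sketch for inspecting those rows; the key name is an assumption based on
# that usage, not a documented contract:
def print_rows(result, limit=5):
    """Pretty-print the first `limit` rows returned by execute_sql() (illustrative helper)."""
    rows = result.get("data") or []
    for row in rows[:limit]:
        print(json.dumps(row, ensure_ascii=False))
    print(f"... {len(rows)} row(s) total")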


def create_chart(token, csrf_token, dataset_id: int, chart_type: str, chart_name: str):
    """Create a chart (slice) on top of an existing dataset and return (id, explore URL)."""
    url = f"{SUPERSET_URL}/api/v1/chart/"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "X-CSRFToken": csrf_token,
    }
    if chart_type == "table":
        # Plain table view over the raw columns, sorted by eu_sales descending.
        params = {
            "query_context": {
                "datasource": {"id": dataset_id, "type": "table"},
                "queries": [
                    {
                        "columns": ["name", "platform", "year", "genre", "publisher", "eu_sales"],
                        "metrics": [],
                        "orderby": [["eu_sales", False]],
                        "row_limit": 100,
                    }
                ],
            },
            "all_columns": ["name", "platform", "year", "genre", "publisher", "eu_sales"],
            "order_by_cols": ["[\"eu_sales\", false]"],
            "row_limit": 100,
            "include_search": True,
            "table_timestamp_format": "%Y-%m-%d %H:%M:%S",
            "page_length": 0,
        }
    elif chart_type == "line":
        # Line chart: average eu_sales per year.
        params = {
            "query_context": {
                "datasource": {"id": dataset_id, "type": "table"},
                "queries": [
                    {
                        "columns": ["year"],
                        "metrics": [
                            {
                                "expressionType": "SIMPLE",
                                "column": {"column_name": "eu_sales", "type": "DOUBLE"},
                                "aggregate": "AVG",
                                "label": "AVG(eu_sales)",
                            }
                        ],
                        "orderby": [["year", True]],
                        "row_limit": 10000,
                    }
                ],
            },
            "x_axis": "year",
            "metrics": ["AVG(eu_sales)"],
            "groupby": ["year"],
            "row_limit": 10000,
        }
    else:
        # Fallback: a minimal query context over name and eu_sales only.
        params = {
            "query_context": {
                "datasource": {"id": dataset_id, "type": "table"},
                "queries": [
                    {
                        "columns": ["name", "eu_sales"],
                        "metrics": [],
                        "row_limit": 100,
                    }
                ],
            }
        }
    payload = {
        "slice_name": chart_name,
        "viz_type": chart_type,
        "datasource_id": dataset_id,
        "datasource_type": "table",
        "params": json.dumps(params),
    }
    resp = session.post(url, json=payload, headers=headers)
    try:
        resp.raise_for_status()
    except requests.HTTPError:
        print(f"Chart HTTP Error: {resp.status_code}")
        print(f"Response: {resp.text}")
        raise
    result = resp.json()
    chart_id = result["id"]
    chart_url = f"{SUPERSET_URL}/superset/explore/?datasource_type=table&datasource_id={dataset_id}&slice_id={chart_id}"
    return chart_id, chart_url
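

# Charts created while experimenting accumulate quickly. Superset exposes
# DELETE /api/v1/chart/{id}; a minimal cleanup sketch (this helper is not part
# of the original gist):
def delete_chart(token, csrf_token, chart_id):
    """Delete a chart by id (illustrative cleanup helper)."""
    url = f"{SUPERSET_URL}/api/v1/chart/{chart_id}"
    headers = {
        "Authorization": f"Bearer {token}",
        "X-CSRFToken": csrf_token,
    }
    resp = session.delete(url, headers=headers)
    resp.raise_for_status()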


def create_chart_with_query_context(token, csrf_token, sql: str, database_id: int, chart_name: str):
    """Alternative chart creation that stores the raw SQL as the chart's query context."""
    url = f"{SUPERSET_URL}/api/v1/chart/"
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "X-CSRFToken": csrf_token,
    }
    payload = {
        "slice_name": chart_name,
        "viz_type": "table",
        "datasource_id": database_id,
        "datasource_type": "query",
        "params": json.dumps({
            "sql": sql,
            "database_id": database_id,
        }),
    }
    resp = session.post(url, json=payload, headers=headers)
    try:
        resp.raise_for_status()
    except requests.HTTPError:
        print(f"Chart HTTP Error: {resp.status_code}")
        print(f"Response: {resp.text}")
        raise
    result = resp.json()
    chart_id = result["id"]
    chart_url = f"{SUPERSET_URL}/superset/explore/?datasource_type=query&datasource_id={database_id}&slice_id={chart_id}"
    return chart_id, chart_url


if __name__ == "__main__":
    print("🚀 Starting Superset API test...")
    print(f"📡 Connecting to: {SUPERSET_URL}")
    sql = "SELECT * FROM public.video_game_sales WHERE eu_sales > 9.00 AND eu_sales < 18.00 ORDER BY eu_sales DESC;"

    print("🔐 Fetching access token...")
    token = get_access_token()
    print("✅ Access token received")
    csrf_token = get_csrf_token(token)

    databases = get_databases(token)
    print("Available databases:")
    for db in databases:
        print(f"ID: {db['id']}, Name: {db['database_name']}")

    datasets = get_datasets(token)
    print("\nAvailable datasets:")
    for ds in datasets:
        print(f"ID: {ds['id']}, Name: {ds['table_name']}, Database: {ds['database']['database_name']}")

    if databases:
        database_id = databases[0]['id']
        print(f"\nUsing database ID: {database_id}")
        result = execute_sql(token, csrf_token, sql, database_id=database_id)
        print("✅ SQL executed")
        print(result)
        print(result.get("data"))
    else:
        print("No databases available!")

    if 'result' in locals() and databases and datasets:
        dataset_id = datasets[0]['id']
        chart_id, url = create_chart(
            token,
            csrf_token,
            dataset_id=dataset_id,
            chart_type="table",
            chart_name="Video Game Sales EU",
        )
        print("✅ Chart created:", url)