import re
from enum import Enum
import streamlit as st
import os
import json
import asyncio
import concurrent.futures
from typing import Optional, List, Dict, Any
from datetime import datetime, date
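
The imports above suggest a Streamlit app that mixes async work with a thread pool. A minimal sketch of that pattern, reusing the gist's imports; the fetch_data coroutine is hypothetical and stands in for the app's real async calls:

async def fetch_data(query: str) -> Dict[str, Any]:
    # Placeholder for real async I/O (API call, database query, ...)
    await asyncio.sleep(0.1)
    return {"query": query, "fetched_at": datetime.now().isoformat()}

def run_async(coro) -> Any:
    # Streamlit reruns the script in a worker thread, so running the coroutine
    # on a fresh event loop in a separate thread avoids clashing with any loop
    # that may already be running.
    with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
        return pool.submit(asyncio.run, coro).result()

if st.button("Fetch"):
    st.json(run_async(fetch_data("example")))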
import streamlit as st
import base64
import os
import json
from openai import AzureOpenAI
from dotenv import load_dotenv
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, Field
import tempfile
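
These imports point at an Azure OpenAI flow that base64-encodes an uploaded file and parses the model's reply into a Pydantic object. A sketch under assumptions: the AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_KEY and AZURE_OPENAI_DEPLOYMENT environment variables exist, and the Invoice model is illustrative rather than the gist's real schema.

load_dotenv()

client = AzureOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    api_version="2024-06-01",
)

class Invoice(BaseModel):
    vendor: str
    invoice_date: Optional[date] = None
    total: float = Field(..., description="Invoice total in the document currency")

def extract_invoice(image_path: str) -> Invoice:
    # Send the image as a base64 data URL and ask for JSON matching the model's fields.
    with open(image_path, "rb") as f:
        b64 = base64.b64encode(f.read()).decode()
    response = client.chat.completions.create(
        model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
        response_format={"type": "json_object"},
        messages=[
            {"role": "system",
             "content": "Extract the invoice as JSON with keys vendor, invoice_date, total."},
            {"role": "user", "content": [
                {"type": "image_url",
                 "image_url": {"url": f"data:image/png;base64,{b64}"}},
            ]},
        ],
    )
    return Invoice(**json.loads(response.choices[0].message.content))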
// Databricks notebook source
import org.graphframes.GraphFrame
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.functions.struct

sc.setCheckpointDir("/dbfs/cp")

// COMMAND ----------
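
The checkpoint directory is set because GraphFrames' connected-components algorithm requires one. A minimal follow-on cell, with made-up vertex and edge data standing in for the notebook's real graph:

// Vertices need an "id" column; edges need "src" and "dst" columns.
// (Databricks notebooks import spark.implicits._ automatically, so toDF is available.)
val vertices = Seq(("a", "Alice"), ("b", "Bob"), ("c", "Carol")).toDF("id", "name")
val edges = Seq(("a", "b", "knows"), ("b", "c", "knows")).toDF("src", "dst", "relationship")

val g = GraphFrame(vertices, edges)
val components = g.connectedComponents.run()  // uses the checkpoint dir set above
display(components)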
// Fix SQL queries being returned as literals
import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}

JdbcDialects.registerDialect(new JdbcDialect() {
  override def canHandle(url: String): Boolean = url.toLowerCase.startsWith("jdbc:databricks:")
  override def quoteIdentifier(column: String): String = column
})
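
Spark's default JDBC dialect wraps identifiers in double quotes, which Databricks SQL reads as string literals, so every row comes back as the column names themselves; returning the identifier unquoted avoids that. A usage sketch with placeholder connection details (host, HTTP path, token, and table are all hypothetical):

val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:databricks://<workspace-host>:443;httpPath=<http-path>;AuthMech=3;UID=token;PWD=<personal-access-token>")
  .option("driver", "com.databricks.client.jdbc.Driver")
  .option("dbtable", "samples.nyctaxi.trips")
  .load()
display(df)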
import time
from queue import Queue
from threading import Thread

table_list = [
    "table1",
    "table2",
    "table3",
    "table4",
    "table5",
]
import pandas as pd
import os

folder_path = "/os/folder/path"
summaryDF = pd.DataFrame()

data = pd.concat(
    [
        pd.read_excel(os.path.join(folder_path, f), sheet_name="Data")
        for f in os.listdir(folder_path)
    ],
)
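
This reads every file in the folder as an Excel workbook, which fails on anything that isn't one. A slightly more defensive variant (assuming each workbook really has a "Data" sheet) filters by extension and tags each row with its source file:

excel_files = [f for f in os.listdir(folder_path) if f.endswith((".xlsx", ".xls"))]
data = pd.concat(
    [
        pd.read_excel(os.path.join(folder_path, f), sheet_name="Data").assign(source_file=f)
        for f in excel_files
    ],
    ignore_index=True,
)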
az account list-locations --query "[?not_null(metadata.latitude)].{RegionName:name, PairedRegion:metadata.pairedRegion[0].name}" --output json
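
The latitude filter drops logical region entries that have no physical location (and therefore no pair). The same query renders more readably as a table:

az account list-locations --query "[?not_null(metadata.latitude)].{RegionName:name, PairedRegion:metadata.pairedRegion[0].name}" --output table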