def create_new_table(tablename, columnmapping, delete_vectors, partitions, columnmappingbyid,
                     liquidclustering, optimize, vacuum, writeStatsAsStruct, writeStatsAsJson):
    # Build a table name that encodes which Delta features are enabled,
    # using the default sample table, publicholidays, as the source
    tablename = "PARQUET_2_0"
    if columnmapping and not columnmappingbyid:
        tablename = tablename + "_columnmapping_by_name"
    if delete_vectors:
        tablename = tablename + "_delete_vectors"
    if partitions:
        tablename = tablename + "_partitions"
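These flags correspond to Delta Lake table features. As a minimal, hypothetical sketch of how they could translate into a CREATE TABLE statement (the column names are placeholders borrowed from the publicholidays sample, a Spark session is assumed, and liquid clustering cannot be combined with partitioning):

# Hypothetical sketch: turn the feature flags into Delta clauses and TBLPROPERTIES.
# Column names below are placeholders, not the author's schema.
def build_create_sql(tablename, columnmapping, delete_vectors, partitions, liquidclustering):
    props = []
    if columnmapping:
        props.append("'delta.columnMapping.mode' = 'name'")
    if delete_vectors:
        props.append("'delta.enableDeletionVectors' = 'true'")
    clause = ""
    if partitions:
        clause = " PARTITIONED BY (countryOrRegion)"
    elif liquidclustering:
        clause = " CLUSTER BY (countryOrRegion)"  # mutually exclusive with PARTITIONED BY
    sql = f"CREATE TABLE {tablename} (countryOrRegion STRING, date DATE) USING DELTA{clause}"
    if props:
        sql += f" TBLPROPERTIES ({', '.join(props)})"
    return sql

spark.sql(build_create_sql("PARQUET_2_0_demo", True, True, False, False))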
# details from the article: https://learn.microsoft.com/en-us/fabric/data-warehouse/collation
# default collation is Latin1_General_100_BIN2_UTF8
# new collation is Latin1_General_100_CI_AS_KS_WS_SC_UTF8
# requires sempy version 0.4.0 or higher
!pip install semantic-link -q
import json
import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException, WorkspaceNotFoundException
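Per the linked article, a non-default collation can only be chosen when the warehouse is created through the Fabric REST API. A minimal sketch using sempy's FabricRestClient, assuming the current workspace and a placeholder warehouse name (the payload shape follows the collation article):

# Sketch: create a warehouse with a case-insensitive collation via the Fabric REST API.
# The display name is a placeholder.
workspace_id = fabric.get_workspace_id()
client = fabric.FabricRestClient()
payload = {
    "type": "Warehouse",
    "displayName": "CaseInsensitiveWarehouse",  # hypothetical name
    "creationPayload": {
        "defaultCollation": "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
    }
}
response = client.post(f"/v1/workspaces/{workspace_id}/items", json=payload)
print(response.status_code)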
import pandas as pd
import struct
import sqlalchemy
import pyodbc
import notebookutils
import sempy.fabric as fabric
import time
from pyspark.sql import functions as fn
from datetime import datetime

def create_engine(connection_string : str):
    token = notebookutils.credentials.getToken('https://analysis.windows.net/powerbi/api').encode("UTF-16-LE")
    token_struct = struct.pack(f'<I{len(token)}s', len(token), token)
    SQL_COPT_SS_ACCESS_TOKEN = 1256
    return sqlalchemy.create_engine("mssql+pyodbc://", creator=lambda: pyodbc.connect(connection_string, attrs_before={SQL_COPT_SS_ACCESS_TOKEN: token_struct}))
# Purpose: print out details of partitions, files per partition, and size per partition in GB.
from notebookutils import mssparkutils

# Define the ABFSS path for your delta table. You can get the ABFSS path of a delta table
# by right-clicking the table name and selecting Copy path from the list of options.
# Remove the table path and the lakehouse name.
delta_table_path = "abfss://[email protected]/"
HowManyLogsIsTooManyLogs = 50  # 50 feels like a reasonable upper bound
HowLargeALogCanBeBeforeItsAIssue = 1  # value in MB

# List all partitions for the given delta table
tables_list = mssparkutils.fs.ls(delta_table_path)
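A sketch of how these thresholds might be applied, assuming each entry under the path is a Delta table folder containing a _delta_log subfolder; the flagging logic is illustrative:

# Sketch: flag tables whose _delta_log has too many or oversized JSON commit files.
for table in tables_list:
    if not table.isDir:
        continue
    log_path = table.path.rstrip("/") + "/_delta_log"
    log_files = [f for f in mssparkutils.fs.ls(log_path) if f.name.endswith(".json")]
    oversized = [f for f in log_files if f.size > HowLargeALogCanBeBeforeItsAIssue * 1024 * 1024]
    if len(log_files) > HowManyLogsIsTooManyLogs or oversized:
        print(f"{table.name}: {len(log_files)} log files, {len(oversized)} over {HowLargeALogCanBeBeforeItsAIssue} MB")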
# Purpose: print out details of partitions, files per partition, and size per partition in GB.
from notebookutils import mssparkutils

# Define the ABFSS path for your delta table. You can get the ABFSS path of a delta table
# by right-clicking the table name and selecting Copy path from the list of options.
delta_table_path = "abfss://ITEM_NAME@ONELAKE_PATH.fabric.microsoft.com/YOURLAKEHOUSE_NAME.Lakehouse/Tables"

# List all partitions for the given delta table
tables_list = mssparkutils.fs.ls(delta_table_path)

# Initialize a dictionary to store partition details
partition_details = {}
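A sketch of how the partition details could be collected from here, assuming tables_list points at a single partitioned table's folder (the _delta_log folder is skipped, and sizes come from the byte counts in the file metadata that mssparkutils returns):

# Sketch: count files and total size in GB per partition folder; key names are illustrative.
for partition in tables_list:
    if not partition.isDir or partition.name == "_delta_log":
        continue
    files = [f for f in mssparkutils.fs.ls(partition.path) if f.isFile]
    partition_details[partition.name] = {
        "file_count": len(files),
        "size_gb": round(sum(f.size for f in files) / (1024 ** 3), 3),
    }
for name, details in partition_details.items():
    print(name, details)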
import pandas as pd
import struct
import sqlalchemy
import pyodbc
import notebookutils

def create_engine(connection_string : str):
    # Fetch an access token for the Power BI / Fabric scope and encode it for ODBC
    token = notebookutils.credentials.getToken('https://analysis.windows.net/powerbi/api').encode("UTF-16-LE")
    # ODBC expects the token as a little-endian length-prefixed struct
    token_struct = struct.pack(f'<I{len(token)}s', len(token), token)
    SQL_COPT_SS_ACCESS_TOKEN = 1256  # pyodbc pre-connect attribute for passing the token
    return sqlalchemy.create_engine("mssql+pyodbc://", creator=lambda: pyodbc.connect(connection_string, attrs_before={SQL_COPT_SS_ACCESS_TOKEN: token_struct}))
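A short usage sketch for the helper above; the server and database names are placeholders for your own Fabric SQL connection string:

# Usage sketch: connect to a Fabric SQL endpoint and run a query.
# Server and database names below are placeholders.
connection_string = (
    "Driver={ODBC Driver 18 for SQL Server};"
    "Server=YOUR_SQL_ENDPOINT.datawarehouse.fabric.microsoft.com;"
    "Database=YOUR_WAREHOUSE;"
)
engine = create_engine(connection_string)
df = pd.read_sql("SELECT TOP 10 * FROM INFORMATION_SCHEMA.TABLES", engine)
print(df)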