Last active
April 29, 2022 22:46
-
-
Save MarkPryceMaherMSFT/943159cb578c532b3ccccbb4b012502a to your computer and use it in GitHub Desktop.
proc to export all the tables to storage. You will need to manually create the credential, file format and data source. There is a wildcard for the schemas and tables to export. There is a TOP variable you will need to change. It's currently set to 1, just in case someone executes this proc.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/*
-- One-time setup (run manually before first use):
--https://docs.microsoft.com/en-us/sql/t-sql/statements/create-database-scoped-credential-transact-sql?view=sql-server-ver15
CREATE DATABASE SCOPED CREDENTIAL AppCred WITH IDENTITY = 'user', SECRET = '<EnterStrongPasswordHere>';
--https://docs.microsoft.com/en-us/azure/synapse-analytics/sql/develop-tables-external-tables?tabs=hadoop#create-external-data-source
CREATE EXTERNAL FILE FORMAT [parquet_snappy] WITH (FORMAT_TYPE = PARQUET, DATA_COMPRESSION = N'org.apache.hadoop.io.compress.SnappyCodec')
CREATE EXTERNAL DATA SOURCE [ds_xxxxxxx] WITH (TYPE = HADOOP, LOCATION = N'wasbs://[email protected]/', CREDENTIAL = [AppCred])
*/
-- Example: export_data 'dbo','%','parquet_snappy','ds_xxxxxxx'
/*
  export_data
  -----------
  Exports every matching user table to external storage via CETAS
  (CREATE EXTERNAL TABLE AS SELECT) on a Synapse dedicated SQL pool.

  Parameters:
    @input_schema_name - schema name filter; supports LIKE wildcards (e.g. '%')
    @input_table_name  - table name filter;  supports LIKE wildcards (e.g. '%')
    @FILE_FORMAT       - name of a pre-created EXTERNAL FILE FORMAT
    @DATA_SOURCE       - name of a pre-created EXTERNAL DATA SOURCE

  Progress, timings, and the generated commands are logged to dbo.tmp_tables,
  which is returned as the result set.
*/
Create proc export_data
    @input_schema_name varchar(50),
    @input_table_name  varchar(500),
    @FILE_FORMAT       varchar(2000),
    @DATA_SOURCE       varchar(2000)
as
begin
    declare @del_table       int = 1; -- 1 = rebuild the work list; 0 = resume an existing dbo.tmp_tables
    declare @clean_ext_table int = 1; -- 1 = drop each external table after its export completes
    declare @sSQL varchar(8000);

    ---------------------------------------------------------------------------
    -- SAFETY VALVE: limits every export to a single row, in case someone
    -- executes this proc by accident. Set to '' to export full tables.
    declare @top varchar(50) = 'top 1';
    ---------------------------------------------------------------------------

    -- per-table timings, written to the log table
    declare @start_datetime datetime2;
    declare @end_datetime   datetime2;

    -- unique run id: suffixes external table names (so re-runs never collide)
    -- and labels each CETAS query for tracing in sys.dm_pdw_exec_requests
    declare @myid   uniqueidentifier = newid();
    declare @myid_s varchar(50);
    set @myid_s = convert(varchar(50), @myid);

    declare @location varchar(500);

    -- run timestamp formatted yyyy-m-d--h-mi-ss; only consumed by the
    -- commented-out per-run folder layout further down
    declare @dt  datetime2;
    declare @sdt varchar(50);
    set @dt = getdate();
    print convert(varchar(50), @dt);
    set @sdt = convert(varchar(50), datepart(yyyy, @dt))
             + '-'  + convert(varchar(50), datepart(mm, @dt))
             + '-'  + convert(varchar(50), datepart(dd, @dt))
             + '--' + convert(varchar(50), datepart(hh, @dt))
             + '-'  + convert(varchar(50), datepart(mi, @dt))
             + '-'  + convert(varchar(50), datepart(ss, @dt));

    if @del_table = 1
    begin
        -- rebuild the work/log table from scratch
        if exists (select 1
                   from sys.schemas s
                   inner join sys.tables t on t.schema_id = s.schema_id
                   where s.name = 'dbo' and t.name = 'tmp_tables')
        begin
            print 'DELETE TABLE';
            set @sSQL = 'DROP TABLE [dbo].[tmp_tables];';
            exec (@sSQL);
        end
        else
        begin
            print 'TABLE DOES NOT EXIST';
        end

        -- permanent work/log table: one row per table to export
        create table dbo.tmp_tables
        (
            seq_id           bigint,
            [schema_name]    varchar(255),
            [table_name]     varchar(255),
            [state]          varchar(255),
            [start_datetime] datetime2,
            [end_datetime]   datetime2,
            [ext_table_name] varchar(255),
            [label]          varchar(500),
            [export_cmd]     varchar(max),
            [data_source]    varchar(500),
            [FILE_FORMAT]    varchar(500),
            [Location]       varchar(500)
        )
        with (distribution = round_robin, heap)

        -- build the list of tables to export; BOTH filters support LIKE
        -- wildcards (the schema filter previously used '=', which made a
        -- '%' schema pattern match nothing)
        insert into dbo.tmp_tables
        select ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) as [seq_nmbr],
               s.name as [schema_name],
               t.name as [table_name],
               'Ready for Export',
               null, null, null, null, null, null, null, null
        from sys.tables t
        inner join sys.schemas s on s.schema_id = t.schema_id
        where t.is_external = 0
          and t.name like @input_table_name
          and s.name like @input_schema_name
    end

    declare @sql2 varchar(8000);
    declare @ssql3 varchar(8000);
    declare @ext_table_name varchar(200);
    DECLARE
        @i INT = 1
      , @t INT = (SELECT COUNT(*) FROM dbo.tmp_tables)
      , @schema_name varchar(255)
      , @table_name  varchar(255)
      , @state       varchar(255)

    WHILE @i <= @t
    BEGIN
        select @schema_name = [schema_name],
               @table_name  = [table_name],
               @state       = [state]
        from dbo.tmp_tables
        where seq_id = @i

        -- per-run unique name; NOTE: may exceed the 128-char identifier limit
        -- for very long table names, in which case the CETAS fails and is
        -- logged as an error for that table
        set @ext_table_name = 'ext_' + @table_name + '_' + @myid_s;
        set @start_datetime = getdate();

        if @state = 'Ready for Export'
        begin
            begin try
                print 'Exporting'

                --set @location ='/exports/' + @sdt + '/' + @table_name + '/'
                set @location = '/exports/tables/' + @table_name + '/'

                -- QUOTENAME guards the catalog- and parameter-sourced
                -- identifiers against embedded ']' / injection; the generated
                -- text is otherwise identical to plain [bracket] wrapping
                set @sql2 = '
                CREATE EXTERNAL TABLE dbo.[' + @ext_table_name + ']
                WITH (DATA_SOURCE = ' + quotename(@DATA_SOURCE) + ',
                LOCATION = N''' + @location + ''',
                FILE_FORMAT = ' + quotename(@FILE_FORMAT) + ',
                REJECT_TYPE = VALUE,
                REJECT_VALUE = 99999)
                as select ' + @top + ' * from ' + quotename(@schema_name) + '.' + quotename(@table_name)
                         + ' option (label=''' + @myid_s + ''')'
                print @sql2

                -- record the command BEFORE running it, so a failed export
                -- still leaves a complete audit row
                update dbo.tmp_tables
                set state          = 'Exporting',
                    start_datetime = @start_datetime,
                    ext_table_name = @ext_table_name,
                    [label]        = @myid_s,
                    [export_cmd]   = @sql2,
                    [data_source]  = @DATA_SOURCE,
                    [FILE_FORMAT]  = @FILE_FORMAT,
                    [Location]     = @location
                where seq_id = @i

                exec (@sql2)
                set @state = 'Exported';

                if @clean_ext_table = 1
                begin
                    -- the external table is only a vehicle for the export;
                    -- dropping it leaves the exported files in storage
                    set @state = 'Deleting external table';
                    set @ssql3 = 'DROP EXTERNAL TABLE dbo.[' + @ext_table_name + ']'
                    exec (@ssql3)
                    set @state = 'Done';
                end
            end try
            begin catch
                -- keep the loop going: one bad table must not stop the run
                set @state = 'Error:' + ERROR_MESSAGE();
            end catch
        end

        set @end_datetime = getdate();
        update dbo.tmp_tables
        set state = @state, end_datetime = @end_datetime
        where seq_id = @i

        SET @i += 1;
    END

    -- final status report for the whole run
    select * from dbo.tmp_tables;
end
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment