-- Substitute 'dbname' with the name of the database you want to drop.
use master
go
-- List the databases on this server that are enabled for replication.
exec sp_helpreplicationdb
-- Remove all replication objects and settings from the database.
exec sp_removedbreplication 'dbname'
-- Roll back open transactions and disconnect other sessions, then restore normal access.
ALTER DATABASE dbname SET SINGLE_USER WITH ROLLBACK IMMEDIATE
ALTER DATABASE dbname SET MULTI_USER
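
Once sp_removedbreplication has cleared the replication metadata, the database can actually be dropped. A minimal follow-up sketch, assuming the same 'dbname' placeholder and that no other sessions still need the database:

-- Sketch only: drop the database after replication has been removed above.
-- 'dbname' is the same placeholder used in the script above.
ALTER DATABASE dbname SET SINGLE_USER WITH ROLLBACK IMMEDIATE
DROP DATABASE dbname
go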
--********** Execute at the Distributor in the master database **********--
USE master;
go
--Is the current server a Distributor?
--Is the distribution database installed?
--Are there other Publishers using this Distributor?
--sp_get_distributor answers all three questions in a single result row.
EXEC sp_get_distributor
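
If sp_get_distributor reports that this server is a Distributor, the teardown usually continues with the standard drop procedures, still in master at the Distributor. A hedged sketch, assuming the default distribution database name 'distribution' and a placeholder Publisher name:

--Sketch only: remove distribution if sp_get_distributor shows it is installed here.
--'MyPublisher' is a placeholder; 'distribution' is the default distribution database name.
EXEC sp_dropdistpublisher @publisher = 'MyPublisher'
EXEC sp_dropdistributiondb @database = 'distribution'
EXEC sp_dropdistributor
go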
// scalastyle:off println
import duplicate.SimHash
import org.apache.commons.lang3.StringUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import scopt.OptionParser
import scala.collection.mutable.ArrayBuffer

object GroupedDuplicate {
// scalastyle:off println
import duplicate.SimHash
import org.apache.commons.lang3.StringUtils
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}
import scopt.OptionParser
import scala.collection.mutable.ArrayBuffer