start a new session:
tmux
start a new session with a name:
tmux new -s myname
apply an aggregate function to a list of columns (Spark SQL):
// https://stackoverflow.com/questions/33882894/spark-sql-apply-aggregate-functions-to-a-list-of-columns
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType}

// schema and sample claims data
val Claim1 = StructType(Seq(StructField("pid", StringType, true), StructField("diag1", StringType, true), StructField("diag2", StringType, true), StructField("allowed", IntegerType, true), StructField("allowed1", IntegerType, true)))
val claimsData1 = Seq(("PID1", "diag1", "diag2", 100, 200), ("PID1", "diag2", "diag3", 300, 600), ("PID1", "diag1", "diag5", 340, 680), ("PID2", "diag3", "diag4", 245, 490), ("PID2", "diag2", "diag1", 124, 248))
val claimRDD1 = sc.parallelize(claimsData1)
val claimRDDRow1 = claimRDD1.map(p => Row(p._1, p._2, p._3, p._4, p._5))
val claimRDD2DF1 = sqlContext.createDataFrame(claimRDDRow1, Claim1)

// map every column name in the list to the same aggregate function,
// then pass the map to agg (the step the linked answer adds)
val l = List("allowed", "allowed1")
val exprs = l.map(_ -> "sum").toMap
claimRDD2DF1.groupBy("pid").agg(exprs).show()
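The map form of agg applies the named function to every listed column, so adding a column only means extending the list; here the result has one row per pid with columns sum(allowed) and sum(allowed1).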
AES encryption helper (Java):
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.security.SecureRandom;
import java.util.Base64;

public class AESUtil {
    // AES in CFB mode with PKCS5 padding and 128-bit keys
    private static final String ALGORITHM = "AES";
    private static final String CIPHER_ALGORITHM = "AES/CFB/PKCS5Padding";
    private static final int KEY_SIZE = 128;
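    // Sketch of how such a helper is commonly completed: a fresh random IV per
    // message, prepended to the ciphertext before Base64 encoding. The method
    // names and the IV-prefix layout are assumptions, not fixed by the constants above.
    public static String encrypt(String plaintext, SecretKeySpec key) throws Exception {
        byte[] iv = new byte[16];                         // AES block size
        new SecureRandom().nextBytes(iv);
        Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
        cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
        byte[] ct = cipher.doFinal(plaintext.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        byte[] out = new byte[iv.length + ct.length];     // output = IV || ciphertext
        System.arraycopy(iv, 0, out, 0, iv.length);
        System.arraycopy(ct, 0, out, iv.length, ct.length);
        return Base64.getEncoder().encodeToString(out);
    }

    public static String decrypt(String encoded, SecretKeySpec key) throws Exception {
        byte[] in = Base64.getDecoder().decode(encoded);
        Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM);
        cipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(in, 0, 16)); // first 16 bytes = IV
        byte[] pt = cipher.doFinal(in, 16, in.length - 16);
        return new String(pt, java.nio.charset.StandardCharsets.UTF_8);
    }
}
// usage sketch: wrap 16 raw key bytes as new SecretKeySpec(keyBytes, "AES"),
// then decrypt(encrypt(text, key), key) round-trips the text.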