I hereby claim:
- I am jacobjohansen on github.
- I am johansenj (https://keybase.io/johansenj) on keybase.
- I have a public key ASB_xkm6FdE1GuRJPpoy1mtgdS6m--JuLiySNeCs3hIQnwo
To claim this, I am signing this object:
    <script type="text/javascript">
    function idleTimer() {
        var t;
        //window.onload = resetTimer;
        window.onmousemove = resetTimer; // catches mouse movements
        window.onmousedown = resetTimer; // catches mouse button presses
        window.onclick = resetTimer;     // catches mouse clicks
        window.onscroll = resetTimer;    // catches scrolling
        window.onkeypress = resetTimer;  // catches keyboard actions
    package main

    import (
        "bytes"
        "crypto/rand"
        "crypto/rsa"
        "crypto/tls"
        "crypto/x509"
        "crypto/x509/pkix"
        "encoding/pem"
    [
      {
        "Classification": "spark-hive-site",
        "Properties": {
          "hive.metastore.client.factory.class": "com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory",
          "hive.metastore.schema.verification": "false"
        }
      },
      {
        "Classification": "hive-site",
    %spark.pyspark

    # Return a field's name and type as a single "name type" string.
    def fieldPair(field):
        return "{} {}".format(field.name, field.dataType.typeName())

    # Add the Delta Lake jar so the delta Python module can be imported.
    sc.addPyFile("/home/hadoop/extrajars/delta-core_2.12-0.6.1.jar")
    from delta.tables import *

    # df_dim is a DataFrame prepared earlier in the notebook (not shown here).
    delta_dim_path = "s3://delta_dim_path"
    df_dim.write.format("delta").save(delta_dim_path)
    BEGIN MESSAGE.
    2EuI4oSc4adzTjk 9O7QMfBoliMJQXt LtYzzKr6XOFUkyE LdEgnTuj1CTTZSo
    9fmanIW63ZCQHY5 BBo9e6gUHQATCKq 6Xr2MZHgg6S0Th0 c8rfMkbxyXeJ07W
    YvSSev3VoD8CNL8 II05Z9CfiIEWXq4 KkoNtOdslFUEi18 YZnxUlpRYDzYdbe
    DxqHZIu9N4EIqBI AMz2I8vYo2BYkge vaH7y8sSTVh227E D.
    END MESSAGE.
    # Recursively list all entries, keep those whose names contain a dot
    # (roughly: files with extensions), strip the directory path, and write
    # the bare file names to cleanedFilename.csv.
    . { (cmd /c "dir /o:n /b /s") } | out-string -stream | Select-String -Pattern ".*\..*" | out-string -stream | %{ $_.Split('\')[-1] } > cleanedFilename.csv
-XX:+UseContainerSupport -XX:MaxRAMPercentage=90 -XX:MinRAMPercentage=90
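These flags tell the JVM to size its heap from the container's cgroup memory limit rather than the host's RAM, capping the heap at roughly 90% of that limit. As a quick sanity check (a sketch added here, not part of the original snippet, and assuming the flags are passed to a JVM running inside a memory-limited container), the limit the JVM actually picked up can be printed at startup:

    fun main() {
        // With -XX:+UseContainerSupport and -XX:MaxRAMPercentage=90, this should
        // report roughly 90% of the container's memory limit.
        val maxHeapMiB = Runtime.getRuntime().maxMemory() / (1024.0 * 1024.0)
        println("Max heap visible to the JVM: %.1f MiB".format(maxHeapMiB))
    }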
    private fun <K, V> Map<K, V>.mergeReduce(other: Map<K, V>, reduce: (V, V) -> V = { a, b -> b }): Map<K, V> {
        // Pre-size the result to hold entries from both maps.
        val result = LinkedHashMap<K, V>(this.size + other.size)
        result.putAll(this)
        other.forEach { e ->
            val existing = result[e.key]
            if (existing == null) {
                result[e.key] = e.value
            } else {
                // Key present in both maps: combine the two values.
                result[e.key] = reduce(existing, e.value)
            }
        }
        return result
    }
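A short usage sketch (added here, not part of the original gist): merging two count maps, summing the values for keys that appear in both. With no reducer supplied, the right-hand map's value wins. Since mergeReduce is declared private, this would live in the same file.

    fun main() {
        val first = mapOf("a" to 1, "b" to 2)
        val second = mapOf("b" to 10, "c" to 3)
        println(first.mergeReduce(second) { a, b -> a + b }) // {a=1, b=12, c=3}
    }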
    -----BEGIN PGP PUBLIC KEY BLOCK-----

    mDMEX9FnZBYJKwYBBAHaRw8BAQdAtPW3v52nqKbm4BCWGS4GKvQAui/uzqkU3QHB
    a2+ndF60KUphY29iIE0gSm9oYW5zZW4gPGpvaGFuc2VuanV3cEBnbWFpbC5jb20+
    iJAEExYKADgWIQQSHFwWzzJTY96gvZHOJHCadP8uLQUCX9FnZAIbAwULCQgHAwUV
    CgkICwUWAgMBAAIeAQIXgAAKCRDOJHCadP8uLWdPAQDTXFF6bATOJt+4EDFdFgqs
    JQjmFqJrtf0Kb7wpx1ZkUgEAz9zihvuy+yI0//VMjF2D9BgAy48X8LgEmgQXOZgT
    +g+4MwRf0WdkFgkrBgEEAdpHDwEBB0Dz2HctW9qw/LkZZxg/jNUIEj5jiPUTC7kv
    EQqN2yuE+Yh4BBgWCgAgFiEEEhxcFs8yU2PeoL2RziRwmnT/Li0FAl/RZ2QCGyAA
    CgkQziRwmnT/Li3U0QD/dQM1nBVrcj1M91pir41bs2BeGKrOVx9SOr8NEU88S24A