https://github.com/drone/drone-wall https://github.com/drone/drone
#!/usr/bin/env node
// Replace the connection string below with your own, then:
//   npm install --save pg && chmod +x pg-test.js && ./pg-test.js
var pg = require('pg');
// Assumed completion (the original gist is truncated here): a minimal connectivity check.
var client = new pg.Client('postgres://user:pass@localhost:5432/postgres');
client.connect(function (err) {
  if (err) throw err;
  console.log('connected OK');
  client.end();
});
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"path/filepath"

	"github.com/ghodss/yaml"
)
You can use select with varargs, including *:

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.sum
import spark.implicits._

df.select($"*" +: Seq("A", "B", "C").map(c =>
  sum(c).over(Window.partitionBy("ID").orderBy("time")).alias(s"cum$c")
): _*)

This:

- Maps column names to window expressions with Seq("A", ...).map(...)
- Prepends all the existing columns with $"*" +: ...
- Unpacks the combined Seq[Column] into varargs with : _*
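For a runnable end-to-end illustration (the toy DataFrame, its column values, and the object name are made up for this sketch, not part of the original answer):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.sum

object CumSumExample extends App {
  val spark = SparkSession.builder.master("local[*]").appName("cumsum").getOrCreate()
  import spark.implicits._

  // Toy input: one row per (ID, time) with three value columns.
  val df = Seq(
    (1, 1L, 10.0, 1.0, 0.5),
    (1, 2L, 20.0, 2.0, 1.5),
    (2, 1L, 30.0, 3.0, 2.5)
  ).toDF("ID", "time", "A", "B", "C")

  // Adds cumA, cumB, cumC: running sums per ID, ordered by time.
  df.select($"*" +: Seq("A", "B", "C").map(c =>
    sum(c).over(Window.partitionBy("ID").orderBy("time")).alias(s"cum$c")
  ): _*).show()

  spark.stop()
}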
// Restify Server CheatSheet.
// More about the API: http://mcavage.me/node-restify/#server-api
// Install restify with: npm install restify

// 1.1. Creating a Server.
// http://mcavage.me/node-restify/#Creating-a-Server

var restify = require('restify');

// Assumed completion of the truncated cheatsheet: a minimal server.
var server = restify.createServer({ name: 'MyApp' });
server.listen(8080, function () {
  console.log('%s listening at %s', server.name, server.url);
});
import java.sql._

// Placeholders to supply: numWorkers and rowsPerBatch are Ints, plus your JDBC URL and statement.
dataframe.coalesce(numWorkers).mapPartitions(d => Iterator(d)).foreach { batch =>
  val dbc: Connection = DriverManager.getConnection("JDBCURL")
  val st: PreparedStatement = dbc.prepareStatement("YOUR PREPARED STATEMENT")
  batch.grouped(rowsPerBatch).foreach { session =>
    session.foreach { x =>
      st.setDouble(1, x.getDouble(1))
      st.addBatch()
    }
    st.executeBatch()
  }
  dbc.close()
}
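The point of this pattern is one JDBC connection per partition rather than per row. For reference, Spark 1.4+ also ships a built-in JDBC writer that handles connections and batching itself; a minimal sketch (URL, table name, and credentials are placeholders, not from the original snippet):

import java.util.Properties

val props = new Properties()
props.setProperty("user", "dbuser")
props.setProperty("password", "dbpass")

// Built-in alternative to the hand-rolled batching above.
dataframe.write
  .mode("append")
  .jdbc("jdbc:postgresql://host:5432/db", "target_table", props)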
#!/bin/sh
# POSIX substring check: returns 0 if $1 occurs anywhere in the remaining arguments.
str_in_list() {
    str="$1"
    shift
    list="$@"
    if test "${list#*$str}" != "$list"
    then
        return 0    # $str is in $list
    else
        return 1    # $str is not in $list
    fi
}

# Example: prints "found" because "b" is in the list.
str_in_list "b" "a" "b" "c" && echo "found"
http://henningpetersen.com/post/22/running-apache-spark-jobs-from-applications
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
private def runSparkSubmit(args: Seq[String]): Unit = {
  val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
  val process = Utils.executeCommand(
    Seq("./bin/spark-submit") ++ args,
    new File(sparkHome))
  // Assumed completion (the snippet is truncated here): fail the test on a non-zero exit code.
  val exitCode = process.waitFor()
  if (exitCode != 0) fail(s"spark-submit returned with exit code $exitCode")
}
Apache Spark stderr and stdout
Try this in a log4j.properties passed to Spark (or modify the default configuration under Spark's conf/ directory):
# Log to stdout and stderr
log4j.rootLogger=INFO, stdout, stderr
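The rootLogger line above only names the two appenders; a sketch of the definitions it implies, in log4j 1.x syntax (the INFO/WARN split and the conversion pattern are assumptions, adjust to taste):

# stdout: INFO and below
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.stdout.filter.f1=org.apache.log4j.varia.LevelRangeFilter
log4j.appender.stdout.filter.f1.LevelMax=INFO

# stderr: WARN and above
log4j.appender.stderr=org.apache.log4j.ConsoleAppender
log4j.appender.stderr.Target=System.err
log4j.appender.stderr.Threshold=WARN
log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
log4j.appender.stderr.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n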