This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { shallow, mount, configure, render } from 'enzyme'; | |
import Adapter from 'enzyme-adapter-react-16'; | |
import waitUntil from 'async-wait-until'; | |
configure({ adapter: new Adapter() }); | |
// Global enzyme test helper: fully mounts a React component so that render
// errors surface. The body is truncated in this view — the assertions (and the
// presumable use of waitUntil imported above) are not visible here.
// NOTE(review): the name reads like a typo for "expectRenderFine" — confirm
// against callers before renaming.
global.exceptRenderFine = async (component) => { | |
// full mount the component (shallow can work and will be faster) | |
const root = mount(component) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import org.apache.spark.sql.functions.udf | |
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema | |
import org.apache.spark.sql.Row | |
// Registers a Spark SQL UDF "struct_def" that walks a nested struct Row along a
// dot-separated path; presumably it returns defaultValue when a segment is
// missing — the tail of the lambda (the traversal loop and fallback) is
// truncated in this view, so that cannot be confirmed here.
spark.udf.register("struct_def", (root:GenericRowWithSchema, path: String, defaultValue: String) => { | |
// Split "a.b.c" into its segments; the final segment is the field to read.
var fields = path.split("\\.") | |
var buffer:Row = root | |
val lastItem = fields.last | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Converts an arbitrary identifier (camelCase, spaced, dashed, dotted) to
// snake_case by scanning character by character. The loop body is truncated in
// this view: only the word-separator branch is visible; the uppercase-boundary
// handling implied by lastUppercase is not shown here.
private static String snakeCaseFormat(String name) { | |
final StringBuilder result = new StringBuilder(); | |
boolean lastUppercase = false; | |
for (int i = 0; i < name.length(); i++) { | |
char ch = name.charAt(i); | |
// 'X' is a neutral sentinel for "no previous output character" on the first pass.
char lastEntry = i == 0 ? 'X' : result.charAt(result.length() - 1); | |
// Treat space, underscore, dash and dot all as word separators.
if (ch == ' ' || ch == '_' || ch == '-' || ch == '.') { | |
lastUppercase = false; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Beats "setup" section: installs/overwrites the Elasticsearch index template
# and loads the bundled Kibana dashboards at startup. Indentation appears to
# have been flattened by the page scrape — restore nesting before use. The
# section is truncated at the "http:" key.
setup: | |
# elasticsearch index template | |
template: | |
enabled: true | |
# Re-push the template on every start, replacing any existing one.
overwrite: true | |
# kibana | |
dashboards.enabled: true | |
# Kibana endpoint is taken from the KIBANA_HOST environment variable.
kibana.host: '${KIBANA_HOST}' | |
http: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Kubernetes Job manifest (templated: {{ stack_name }} is rendered by an
# external templater, presumably Ansible/Jinja2 given the playbook elsewhere on
# this page). Runs a one-off "update-credentials" container from the
# radial/busyboxplus image; the container command and the rest of the spec are
# truncated in this view. Indentation appears flattened by the scrape.
apiVersion: batch/v1 | |
kind: Job | |
metadata: | |
name: {{ stack_name }}-security-setup-users | |
spec: | |
template: | |
spec: | |
containers: | |
- name: update-credentials | |
image: radial/busyboxplus |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import org.apache.spark.sql.Column | |
import org.apache.spark.sql.types.StructType | |
import org.apache.spark.sql.functions.col | |
// Recursively flattens a (possibly nested) Spark StructType into an array of
// Column references with dot-separated names ("parent.child"). Only the
// recursive StructType case is visible here — the leaf case (presumably
// `case _ => Array(col(colName))`, given the `col` import above) and the
// closing braces are truncated in this view.
def flattenSchema(schema: StructType, prefix: String = null) : Array[Column] = { | |
schema.fields.flatMap(f => { | |
// null prefix marks the top level: no parent path to prepend.
val colName = if (prefix == null) f.name else (prefix + "." + f.name) | |
f.dataType match { | |
case st: StructType => flattenSchema(st, colName) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Read the "clicks" Elasticsearch index into a DataFrame via the es-hadoop
// connector. The connector settings are gathered into one map and handed to
// .options(...) in a single call instead of chained .option(...) calls;
// the resulting reader configuration is identical.
val esReadSettings = Map(
  "es.nodes.wan.only"     -> "true",
  "es.mapping.date.rich"  -> "false",
  // NOTE(review): skips the geo_point field on read — presumably it cannot be
  // mapped to a Spark type here; confirm against the index mapping.
  "es.read.field.exclude" -> "dimensions.geo_point",
  "es.nodes"              -> "conso-es:9200"
)
val df = sqlContext.read.format("es").options(esReadSettings).load("clicks")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// JDBC connection settings for the destination MySQL database.
// Driver, user and password are folded into the Properties object from a
// single sequence of key/value pairs; the XXX values are placeholders to be
// filled in before running.
val prop = new java.util.Properties
Seq(
  "driver"   -> "com.mysql.jdbc.Driver",
  "user"     -> "XXX",
  "password" -> "XXXX"
).foreach { case (key, value) => prop.setProperty(key, value) }
// JDBC MySQL URL — the destination schema on the server is "datahub"
// (XXXXX is the placeholder host).
val url = "jdbc:mysql://XXXXX:3306/datahub"
// write data from spark dataframe to database
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/python3
# CLI bootstrap for a script that auto-creates Kibana index patterns from the
# indices found in an Elasticsearch cluster (the part that calls the APIs is
# below this fragment). Takes two positional URLs on the command line.
import argparse
import json
import requests

parser = argparse.ArgumentParser(
    description='Create automatically index patterns, from elasticsearch indices.')
# Positional order matters: elasticsearch URL first, then kibana URL.
parser.add_argument('elasticsearch_url', type=str, help='elasticsearch full URL')
parser.add_argument('kibana_url', type=str, help='kibana full URL')
args = parser.parse_args()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Ansible playbook: on the "recette" inventory group, escalate to root and
# collect `uname -s` (kernel name) and `uname -m` (machine architecture) into
# the `unames` register. Indentation appears flattened by the page scrape, and
# the playbook is truncated at the final set_fact task.
--- | |
- hosts: recette | |
become: yes | |
become_user: root | |
tasks: | |
- name: "Get Linux uname's" | |
# Runs once per item: "uname -s" and "uname -m".
command: "uname -{{ item }}" | |
# Results are collected as a list under unames.results, one entry per item.
register: unames | |
with_items: ["s", "m"] | |
- set_fact: |