Skip to content

Instantly share code, notes, and snippets.

View ebuildy's full-sized avatar
🤟
South of France

Thomas Decaux ebuildy

🤟
South of France
View GitHub Profile
@ebuildy
ebuildy / setupTest.js
Created February 27, 2019 10:19
Async React component testing with Jest & Enzyme
// Jest/Enzyme test setup: registers the React 16 adapter and defines a
// global async helper for asserting that a component renders without errors.
import { shallow, mount, configure, render } from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import waitUntil from 'async-wait-until';
configure({ adapter: new Adapter() });
// Global helper so individual test files don't need to import it.
// NOTE(review): gist preview is truncated below — the rest of the helper
// (presumably an await on waitUntil, per the import) is not visible here.
global.exceptRenderFine = async (component) => {
// full mount the component (shallow can work and will be faster)
const root = mount(component)
@ebuildy
ebuildy / udf_struct_get.scala
Created June 28, 2019 07:51
Apache Spark SQL UDF to get a value from a Struct by dot-notation path, or a default value if the path is missing
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.Row
// Registers a SQL UDF "struct_def(root, path, default)" that descends into a
// nested Struct following a dot-notation path (e.g. "a.b.c") and returns the
// value found there, falling back to defaultValue (per the gist description).
spark.udf.register("struct_def", (root:GenericRowWithSchema, path: String, defaultValue: String) => {
// split on literal dots — "\\." escapes the regex metacharacter
var fields = path.split("\\.")
// buffer holds the Row currently being descended into, starting at the root
var buffer:Row = root
// the final path segment is the field to read (intermediates are structs)
val lastItem = fields.last
// NOTE(review): gist preview is truncated here — the traversal loop and the
// default-value fallback are not visible in this excerpt.
@ebuildy
ebuildy / snakeCaseFormat.java
Created August 23, 2019 19:26
snakeCaseFormat Java
/**
 * Formats a name into snake_case.
 *
 * NOTE(review): gist preview is truncated — only the separator-handling
 * branch is visible; the uppercase-to-underscore conversion logic that
 * presumably uses {@code lastUppercase} and {@code lastEntry} is cut off.
 */
private static String snakeCaseFormat(String name) {
final StringBuilder result = new StringBuilder();
// whether the previously consumed character was uppercase — state used to
// distinguish word boundaries in camelCase input
boolean lastUppercase = false;
for (int i = 0; i < name.length(); i++) {
char ch = name.charAt(i);
// last character already appended to the output ('X' sentinel when empty)
char lastEntry = i == 0 ? 'X' : result.charAt(result.length() - 1);
// treat common word separators (space, underscore, dash, dot) uniformly
if (ch == ' ' || ch == '_' || ch == '-' || ch == '.') {
lastUppercase = false;
@ebuildy
ebuildy / gist:3380e4a8e9201dc78301fa311c8f9a02
Created November 15, 2019 08:33
Elastic Packetbeat configuration to log the "took" time of Elasticsearch search queries.
# Packetbeat configuration excerpt: manage the Elasticsearch index template
# and load the bundled Kibana dashboards on startup.
setup:
# elasticsearch index template
template:
enabled: true
# re-push the template even if one already exists
overwrite: true
# kibana
dashboards.enabled: true
# Kibana endpoint, injected from the environment
kibana.host: '${KIBANA_HOST}'
# NOTE(review): preview truncated — the http protocol settings that follow
# (where the ES query capture would be configured) are not visible here.
# Indentation appears stripped by the page scrape; original nesting assumed.
http:
# Kubernetes Job (templated — note the {{ stack_name }} placeholder) running a
# one-off busybox container named "update-credentials".
apiVersion: batch/v1
kind: Job
metadata:
name: {{ stack_name }}-security-setup-users
spec:
template:
spec:
containers:
- name: update-credentials
image: radial/busyboxplus
# NOTE(review): preview truncated — the container command/args and restart
# policy are not visible. Indentation appears stripped by the page scrape.
@ebuildy
ebuildy / gist:736ddb7160b587f6405c88c556287a17
Last active February 20, 2020 17:40
Flatten a nested Apache Spark DataFrame schema
import org.apache.spark.sql.Column
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.functions.col
// Recursively flattens a (possibly nested) StructType schema into a flat
// array of Columns whose names use dot-notation ("parent.child").
// @param schema the DataFrame schema to flatten
// @param prefix dot-joined path of the enclosing structs (null at top level)
def flattenSchema(schema: StructType, prefix: String = null) : Array[Column] = {
schema.fields.flatMap(f => {
// fully-qualified column name for this field
val colName = if (prefix == null) f.name else (prefix + "." + f.name)
f.dataType match {
// nested struct: recurse, carrying the accumulated dot-path as prefix
case st: StructType => flattenSchema(st, colName)
// NOTE(review): gist preview is truncated — the leaf case (presumably
// `case _ => Array(col(colName))`) and closing braces are not visible.
@ebuildy
ebuildy / gist:a0d244adaf39d5be3f8e1dbacdc5bd30
Created February 20, 2020 17:42
Read elasticsearch data
// Reads the "clicks" index from Elasticsearch into a DataFrame using the
// elasticsearch-hadoop ("es") data source, then prepares JDBC properties to
// write it out to MySQL.
val df = sqlContext.read.format("es")
// talk only to the declared node(s); skip cluster node discovery
.option("es.nodes.wan.only", "true")
// disable rich date mapping (keep dates as plain values)
.option("es.mapping.date.rich", "false")
// skip the geo_point field when reading
.option("es.read.field.exclude", "dimensions.geo_point")
.option("es.nodes", "conso-es:9200")
.load("clicks")
//create properties object
val prop = new java.util.Properties
prop.setProperty("driver", "com.mysql.jdbc.Driver")
// NOTE(review): placeholder credentials — never commit real ones
prop.setProperty("user", "XXX")
prop.setProperty("password", "XXXX")
//jdbc mysql url - destination database is named "datahub"
val url = "jdbc:mysql://XXXXX:3306/datahub"
//write data from spark dataframe to database
#!/usr/bin/python3
# CLI tool: automatically creates Kibana index patterns from the indices of an
# Elasticsearch cluster (per the argparse description below).
import argparse, json, requests
parser = argparse.ArgumentParser(description='Create automatically index patterns, from elasticsearch indices.')
parser.add_argument('elasticsearch_url',type=str, help='elasticsearch full URL')
parser.add_argument('kibana_url', type=str, help='kibana full URL')
args = parser.parse_args()
# NOTE(review): gist preview is truncated — the code that queries ES and
# posts index patterns to Kibana (via `requests`/`json`) is not visible here.
@ebuildy
ebuildy / playbook_install_docker_compose.yaml
Created May 11, 2020 08:56
Install docker-compose via Ansible
---
# Ansible playbook: installs docker-compose on hosts in the "recette" group
# (per the gist title), running tasks as root.
- hosts: recette
become: yes
become_user: root
tasks:
# capture `uname -s` (kernel name) and `uname -m` (machine architecture),
# presumably to build the docker-compose download URL — TODO confirm, the
# playbook is truncated below
- name: "Get Linux uname's"
command: "uname -{{ item }}"
register: unames
with_items: ["s", "m"]
# NOTE(review): preview truncated after this line — the set_fact body is not
# visible. Indentation appears stripped by the page scrape.
- set_fact: