Sticks elements with the class "sticky-item" vertically. Intended for Table > sticky cells: the code checks each element's left offset to build the sticky groups.
// JDBC connection properties for the MySQL destination.
// NOTE(review): credentials are placeholders ("XXX") — inject real values
// from configuration/secrets rather than hard-coding them.
val prop = new java.util.Properties
prop.setProperty("driver", "com.mysql.jdbc.Driver")
prop.setProperty("user", "XXX")
prop.setProperty("password", "XXXX")

// JDBC MySQL URL — the destination database is named "datahub".
val url = "jdbc:mysql://XXXXX:3306/datahub"

// Read the "clicks" index from Elasticsearch into a Spark DataFrame.
// (prop/url above are presumably used further down to write this frame
// to MySQL — the write call is not visible in this fragment.)
val df = sqlContext.read
  .format("es")
  .option("es.nodes.wan.only", "true")      // cluster reachable via WAN addresses only
  .option("es.mapping.date.rich", "false")  // keep dates as plain strings
  .option("es.read.field.exclude", "dimensions.geo_point")
  .option("es.nodes", "conso-es:9200")
  .load("clicks")
import org.apache.spark.sql.Column
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.functions.col

/**
 * Recursively flattens a (possibly nested) StructType schema into a flat
 * array of column references, naming nested fields by their dot-separated
 * path (e.g. `a.b.c`). Useful for selecting all leaf columns of a struct.
 *
 * @param schema the schema to flatten
 * @param prefix dot-path accumulated so far; null at the top level
 *               (kept as null rather than Option for caller compatibility)
 * @return one Column per leaf field, referenced by its full dot-path
 */
def flattenSchema(schema: StructType, prefix: String = null): Array[Column] = {
  schema.fields.flatMap { f =>
    val colName = if (prefix == null) f.name else s"$prefix.${f.name}"
    f.dataType match {
      // Descend into nested structs, carrying the accumulated path.
      case st: StructType => flattenSchema(st, colName)
      // Leaf field: emit a single column reference. Without this case the
      // match is non-exhaustive and throws MatchError on any non-struct field.
      case _ => Array(col(colName))
    }
  }
}
# Kubernetes Job that runs credential/user setup for the stack's security layer.
# NOTE(review): indentation was reconstructed — the source was flattened by
# table extraction; nesting follows the batch/v1 Job schema. Verify against
# the original manifest.
# NOTE(review): a Job pod spec also requires `restartPolicy: Never` (or
# OnFailure) and this container has no command — the fragment visible here
# ends at the image line, so the manifest is likely truncated.
apiVersion: batch/v1
kind: Job
metadata:
  # Templated (Jinja/Helm-style) stack-name prefix.
  name: {{ stack_name }}-security-setup-users
spec:
  template:
    spec:
      containers:
        - name: update-credentials
          image: radial/busyboxplus
| setup: | |
| # elasticsearch index template | |
| template: | |
| enabled: true | |
| overwrite: true | |
| # kibana | |
| dashboards.enabled: true | |
| kibana.host: '${KIBANA_HOST}' | |
| http: |
| private static String snakeCaseFormat(String name) { | |
| final StringBuilder result = new StringBuilder(); | |
| boolean lastUppercase = false; | |
| for (int i = 0; i < name.length(); i++) { | |
| char ch = name.charAt(i); | |
| char lastEntry = i == 0 ? 'X' : result.charAt(result.length() - 1); | |
| if (ch == ' ' || ch == '_' || ch == '-' || ch == '.') { | |
| lastUppercase = false; |
| import org.apache.spark.sql.functions.udf | |
| import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema | |
| import org.apache.spark.sql.Row | |
| spark.udf.register("struct_def", (root:GenericRowWithSchema, path: String, defaultValue: String) => { | |
| var fields = path.split("\\.") | |
| var buffer:Row = root | |
| val lastItem = fields.last | |
| import { shallow, mount, configure, render } from 'enzyme'; | |
| import Adapter from 'enzyme-adapter-react-16'; | |
| import waitUntil from 'async-wait-until'; | |
| configure({ adapter: new Adapter() }); | |
| global.exceptRenderFine = async (component) => { | |
| // full mount the component (shallow can work and will be faster) | |
| const root = mount(component) |
| plugins { | |
| ... | |
| id "org.ajoberstar.grgit" version "3.0.0" | |
| } | |
| task createReleaseReportFile() { | |
| doLast { | |
| file("$projectDir/src/main/resources/release_note.properties").text = """## Automated generated by Gradle ## |
Sticks elements with the class "sticky-item" vertically. Intended for Table > sticky cells: the code checks each element's left offset to build the sticky groups.