yarn application -kill application_1428487296152_25597
https://stackoverflow.com/questions/29565716/spark-kill-running-application
-- Fetch customers for one country; '${VAR_COUNTRY}' is substituted by the
-- calling tool (CI variable / template engine) before execution.
-- NOTE(review): SELECT * kept only because the customer schema is not
-- visible here — replace with an explicit column list once known.
SELECT *
FROM customer
WHERE country = '${VAR_COUNTRY}';
-- Exasol bulk load: import a client-local CSV into mytable over an
-- encrypted (SECURE) connection.
-- Column list: CSV col 1 maps positionally, col 2 is parsed with the
-- date format 'YYYY-MM-DD', cols 3..12 map positionally.
IMPORT INTO mytable
FROM LOCAL SECURE CSV
FILE '/file.csv'            -- Exasol requires the file name as a quoted string literal
(1, 2 FORMAT = 'YYYY-MM-DD', 3..12)
ENCODING = 'ASCII'
ROW SEPARATOR = 'LF'
SKIP = 1;                   -- skip the CSV header row
COMMIT;
docker run -u 0 -it myImage:tag bash
yarn application -kill application_1428487296152_25597
https://stackoverflow.com/questions/29565716/spark-kill-running-application
// ...
def endDate = new Date().clearTime() // today
def startDate = endDate - 30
def newDateParsed
startDate.upto(endDate) {
    newDateParsed = it.format("yyyy-MM-dd")
    println(newDateParsed)
// ...
def DAYS_BACK = 30
def iterDate = new Date() - DAYS_BACK
def newDateParse
for (i=0; i <DAYS_BACK; i++) {
    iterDate = iterDate + 1
    newDateParse = iterDate.format("yyyy-MM-dd")
    stage("newDateParsed ${newDateParse}") {
# Minimal Alpine base for an AWS CLI utility container.
FROM alpine:3.10.3

# Pin the aws-cli version for reproducible builds (modern ENV key=value form).
ENV AWSCLI_VERSION="1.14.10"

# openssh: ssh/scp access from the container;
# python + py-pip: needed to pip-install awscli below.
# --no-cache avoids leaving the apk index in the image layer.
RUN apk add --no-cache \
    openssh \
    python \
    py-pip
# installing aws cli
# Extend the Exasol docker-db image with the cloud-storage ETL UDF jar,
# placed in the default BucketFS bucket so UDF scripts can load it.
FROM exasol/docker-db:latest

ENV EXA_BUCKET_PATH="/exa/data/bucketfs/bfsdefault/default"
ENV CLOUD_STORAGE_VERSION="0.6.0"
# Brace-delimited substitution keeps the variable name unambiguous
# next to the '.jar' suffix and path separators.
ENV JAR_FILENAME="cloud-storage-etl-udfs-${CLOUD_STORAGE_VERSION}.jar"

# ADD downloads the release artifact at build time directly into BucketFS.
ADD https://github.com/exasol/cloud-storage-etl-udfs/releases/download/v${CLOUD_STORAGE_VERSION}/${JAR_FILENAME} ${EXA_BUCKET_PATH}/${JAR_FILENAME}

# Make the jar readable by the Exasol service processes.
RUN chmod 775 ${EXA_BUCKET_PATH}/${JAR_FILENAME}
# NOTE(review): chown left disabled as in the original; enable if the
# BucketFS user cannot read the file.
#RUN chown exadefusr:exausers ${EXA_BUCKET_PATH}/${JAR_FILENAME}