brew install --cask graalvm/tap/graalvm-ce-java11
export GRAALVM_HOME="/Library/Java/JavaVirtualMachines/graalvm-ce-java11-21.0.0/Contents/Home/"
sudo xattr -r -d com.apple.quarantine /Library/Java/JavaVirtualMachines/graalvm-ce-java11-21.0.0
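After installing and de-quarantining the runtime, the native-image component typically still has to be added through the GraalVM updater; a quick sketch of that step plus a sanity check, assuming the GRAALVM_HOME exported above:

# Add the native-image component and confirm the toolchain points at the new runtime
$GRAALVM_HOME/bin/gu install native-image
$GRAALVM_HOME/bin/native-image --version
$GRAALVM_HOME/bin/java -version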
# Clear any credentials left over from a previous session
unset AWS_ACCESS_KEY_ID
unset AWS_SECRET_ACCESS_KEY
unset AWS_SESSION_TOKEN

# Look up the one-time password in 1Password and the MFA device ARN in IAM
OTP_ID=$1
OTP=$(op item get "$OTP_ID" --otp)
DEVICE_ID=$(aws iam list-mfa-devices | jq -r '.MFADevices[0].SerialNumber')

# Request temporary session credentials (36 hours) using the MFA token
RESULT=$(aws sts get-session-token --serial-number "$DEVICE_ID" --duration-seconds 129600 --token-code "$OTP")
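RESULT now holds the raw JSON response from STS; a minimal follow-up sketch (assuming jq, as above) that exports the temporary credentials from it:

# Sketch: export the temporary credentials returned by get-session-token
export AWS_ACCESS_KEY_ID=$(echo "$RESULT" | jq -r '.Credentials.AccessKeyId')
export AWS_SECRET_ACCESS_KEY=$(echo "$RESULT" | jq -r '.Credentials.SecretAccessKey')
export AWS_SESSION_TOKEN=$(echo "$RESULT" | jq -r '.Credentials.SessionToken')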
ncat localhost 8888
package functions.spark

import java.sql.Timestamp
import java.time.Instant
import java.util.UUID

case class CDFRecord[Out <: Product, In <: Convertable[Out]](value: In) {
  def convert: Out = {
    val ingestedAt = Timestamp.from(Instant.now())
    val ingestId = UUID.randomUUID().toString
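The snippet relies on a Convertable[Out] type class that is not shown here; purely as a hypothetical sketch (the trait name comes from the snippet, its members are an assumption), it could look like this:

// Hypothetical shape of the Convertable type class referenced above; the real
// definition is not part of this excerpt, so the method name and signature are
// assumptions for illustration only.
trait Convertable[Out <: Product] {
  def toRecord(ingestedAt: Timestamp, ingestId: String): Out
}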
val oracleLinux = "8"
val jvmVersion = "17"
val graalVersion = "22"

GraalVMNativeImage / containerBuildImage := GraalVMNativeImagePlugin
  .generateContainerBuildImage(s"ghcr.io/graalvm/graalvm-ce:ol$oracleLinux-java$jvmVersion-$graalVersion")
  .value,
graalVMNativeImageOptions := Seq(
  "--static",
  "--verbose",
environment: minikube
serviceAccount: spark-spark
arguments:
  - "/mnt/data-in/"
  - "/mnt/data-out/"
volumes:
  - name: input-data
    hostPath:
      path: /input-data
  - name: output-data
apiVersion: sparkoperator.k8s.io/v1beta2
kind: SparkApplication
metadata:
  name: {{ .Release.Name | trunc 63 }}
  labels:
    chartname: {{ .Chart.Name | trunc 63 | quote }}
    release: {{ .Release.Name | trunc 63 | quote }}
    revision: {{ .Release.Revision | quote }}
    sparkVersion: {{ .Values.sparkVersion | quote }}
    version: {{ .Chart.Version | quote }}
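Only the metadata block is shown above; a sketch of how the values file (serviceAccount, arguments, volumes) could feed the spec, assuming the standard v1beta2 SparkApplication fields rather than the chart's actual template:

# Sketch only -- field names follow the spark-operator v1beta2 CRD, the wiring is an assumption
spec:
  type: Scala
  mode: cluster
  sparkVersion: {{ .Values.sparkVersion | quote }}
  arguments:
    {{- toYaml .Values.arguments | nindent 4 }}
  volumes:
    {{- toYaml .Values.volumes | nindent 4 }}
  driver:
    serviceAccount: {{ .Values.serviceAccount }}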
# Information
# Main Spark Docker file code: https://github.com/apache/spark/blob/master/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile

ARG SPARK_IMAGE=gcr.io/spark-operator/spark:v2.4.4
FROM ${SPARK_IMAGE}

ARG VERSION
ARG VCS_REF
ARG BUILD_DATE
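The VERSION, VCS_REF and BUILD_DATE build args are typically consumed as image labels; a sketch of that usage (the label keys are an assumption, not taken from the original Dockerfile):

# Sketch: record build metadata as OCI image labels (keys are an assumption)
LABEL org.opencontainers.image.version="${VERSION}" \
      org.opencontainers.image.revision="${VCS_REF}" \
      org.opencontainers.image.created="${BUILD_DATE}"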
lazy val root = (project in file("."))
  .enablePlugins(sbtdocker.DockerPlugin)
  .enablePlugins(AshScriptPlugin)
  .settings(
    commonSettings,
    assemblySettings,
    dockerSettings,
    runLocalSettings,
    name := "transform-movie-ratings",
    Compile / mainClass := Some("xyz.graphiq.BasicSparkJob"),
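The grouped settings (commonSettings, assemblySettings, dockerSettings, runLocalSettings) are not shown in this excerpt; purely as a hypothetical sketch of what a dockerSettings group built on sbt-docker could contain (the image name, base image and jar path below are all assumptions):

// Hypothetical dockerSettings sketch -- illustrative only, not the article's actual settings
lazy val dockerSettings = Seq(
  docker / imageNames := Seq(ImageName(s"graphiq/${name.value}:${version.value}")),
  docker / dockerfile := new Dockerfile {
    from("gcr.io/spark-operator/spark:v2.4.4")
    copy(assembly.value, "/opt/spark/jars/app.jar") // fat jar from sbt-assembly (assumed)
  }
)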
// Task to create helm chart
lazy val createImporterHelmChart: Def.Initialize[Task[Seq[File]]] = Def.task {
  val chartFile = baseDirectory.value / "helm" / "Chart.yaml"
  val valuesFile = baseDirectory.value / "helm" / "values.yaml"

  val chartContents =
    s"""# Generated by build.sbt. Please don't manually update
       |apiVersion: v1
       |name: $domain-${name.value}
       |version: ${version.value}
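       |""".stripMargin // sketch: close the chart contents here (the original string is truncated above)

  // Sketch of a typical wrap-up, not the article's actual code: write the generated
  // chart and return the files so sbt can track them; values.yaml would be handled analogously.
  IO.write(chartFile, chartContents)
  Seq(chartFile, valuesFile)
}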