- AVRO: the most widely used serialization protocol in the Kafka ecosystem
- with a Schema Registry => enables more compact data serialization, with schema versioning as a bonus
- easy way to run the Avro tools:
# Run the Avro IDL compiler via Coursier (cs) without a permanent install;
# compiles file.avdl (Avro IDL) using avro-tools 1.10.2.
cs launch org.apache.avro:avro-tools:1.10.2 -M org.apache.avro.tool.Main -- idl file.avdl
# Download and unpack Kafka 2.8.0 (Scala 2.13 build).
mkdir -p "$HOME/kafka"
cd "$HOME/kafka" || exit 1
curl -o kafka.tar.gz https://mirrors.ircam.fr/pub/apache/kafka/2.8.0/kafka_2.13-2.8.0.tgz
# BUG FIX: the archive is saved as kafka.tar.gz, but the original
# extracted a non-existent "kaftar.tar.gz" (typo).
tar xvfz kafka.tar.gz
# Expose the extracted distribution under a stable path.
ln -s kafka_* current

# Kafka install locations.
export KAFKA_HOME="$HOME/kafka/current"
export KAFKA_BIN="$KAFKA_HOME/bin"
export PATH="$PATH:$KAFKA_BIN"

# MYIP = the machine's primary routable address (used later for
# advertised listeners); the broker endpoint below targets the
# local listener. Quoted to avoid word-splitting (SC2086).
export MYIP="$(hostname -I | awk '{print $1}')"
export KAFKA_IP=127.0.0.1
export KAFKA_ENDPOINT="$KAFKA_IP:9092"
# Convenience aliases for the Kafka CLI tools, pre-wired to the local broker.
# Double quotes mean $KAFKA_BIN/$KAFKA_ENDPOINT are expanded NOW, at
# definition time — the aliases freeze the current values.
# NOTE(review): aliases only expand in interactive shells (or after
# `shopt -s expand_aliases`); inside a script, prefer functions.
alias topics="$KAFKA_BIN/kafka-topics.sh --bootstrap-server $KAFKA_ENDPOINT"
alias console-producer="$KAFKA_BIN/kafka-console-producer.sh --broker-list $KAFKA_ENDPOINT"
alias console-consumer="$KAFKA_BIN/kafka-console-consumer.sh --bootstrap-server $KAFKA_ENDPOINT --timeout-ms 5000"
alias consumer-groups="$KAFKA_BIN/kafka-consumer-groups.sh --bootstrap-server $KAFKA_ENDPOINT"
alias configs="$KAFKA_BIN/kafka-configs.sh --bootstrap-server $KAFKA_ENDPOINT"
# Start ZooKeeper in the background; capture stdout AND stderr
# (the original dropped stderr, losing startup errors and nohup warnings).
nohup "$KAFKA_BIN/zookeeper-server-start.sh" "$KAFKA_HOME/config/zookeeper.properties" > zookeeper.nohup 2>&1 &
echo $! > zookeeper.pid

# Start the Kafka broker. ZooKeeper must be up first — give it a few
# seconds, or watch zookeeper.nohup for "binding to port".
nohup "$KAFKA_BIN/kafka-server-start.sh" "$KAFKA_HOME/config/server.properties" > kafka.nohup 2>&1 &
echo $! > kafka.pid
# --- KRaft mode (ZooKeeper-less) ---
# BUG FIX: fetch the sample KRaft config FIRST — the original downloaded
# server.properties AFTER running `kafka-storage.sh format --config
# server.properties`, which reads that very file.
curl -o server.properties https://raw.githubusercontent.com/apache/kafka/6d1d68617ecd023b787f54aafc24a4232663428d/config/kraft/server.properties

# Advertise the machine's routable address instead of localhost.
MYIP="$(hostname -I | awk '{print $1}')"
sed -i -E "[email protected]=PLAINTEXT://localhost:[email protected]=PLAINTEXT://$MYIP:9092@" server.properties

# Generate a cluster id and format the log directories with it.
CLUSTER_ID="$(kafka-storage.sh random-uuid)"
kafka-storage.sh format --config server.properties --cluster-id "$CLUSTER_ID"

# Launch the broker in KRaft mode, capturing stdout+stderr.
nohup "$KAFKA_BIN/kafka-server-start.sh" server.properties > kafka.nohup 2>&1 &
echo $! > kafka.pid
# Create a single-partition, unreplicated demo topic, then list topics.
topics --create --replication-factor 1 --partitions 1 --topic test
topics --list
# Produce one JSON message.
echo '{"message":"Hello world"}' | console-producer --topic test
# First read: consumes the message and commits the group's offset.
console-consumer --from-beginning --topic test --group myconsumer
# Second read returns nothing: --from-beginning does NOT override the
# committed offset of an existing consumer group — this is the demo's point.
console-consumer --from-beginning --topic test --group myconsumer
consumer-groups --list
consumer-groups --describe --group myconsumer
# Rewind the group's offsets so the message can be consumed again.
consumer-groups --execute --reset-offsets --to-earliest --group myconsumer --all-topics
console-consumer --from-beginning --topic test --group myconsumer
# Show all (including default) configs for the topic, then clean up.
configs --describe --topic test --all
topics --delete --topic test
# Kafdrop web UI for browsing topics/consumer groups. Point it at the
# host's routable address — inside the container, 127.0.0.1 would refer
# to the container itself, not the broker.
MYIP="$(hostname -I | awk '{print $1}')"
# --rm removes the throwaway container on exit; value quoted (SC2086).
docker run -it --rm -p 9000:9000 \
  -e KAFKA_BROKERCONNECT="$MYIP:9092" \
  obsidiandynamics/kafdrop
Then visit http://127.0.0.1:9000/ in a browser.