Setup
# topic name, partition count, and replication factor below are placeholder
# values; adjust them for your environment
bin/kafka-topics.sh \
--zookeeper zookeeper.example.com:2181 \
--create \
--topic example-topic \
--partitions 1 \
--replication-factor 1
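Once the topic is created, the same tool can confirm the partition and replica assignment with its --describe switch; the topic name here is the same placeholder used above.

bin/kafka-topics.sh \
--zookeeper zookeeper.example.com:2181 \
--describe \
--topic example-topic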
var nconf = require('nconf');
var yaml = require('js-yaml');
var app_config = __dirname + '/../config/application.yml';
// load cmd line args and environment vars
nconf.argv().env();
// load a yaml file using a custom formatter
nconf.file({
  file: app_config,
  format: {
    parse: yaml.safeLoad,
    stringify: yaml.safeDump
  }
});
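With the stores loaded in this order, nconf resolves lookups against the command-line arguments first, then the environment, and only then the YAML file. The key below is an illustrative name, not one taken from the original configuration file.

// values supplied via argv or env take precedence over the YAML file
var dbHost = nconf.get('database:host');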
#!/bin/sh
# query the cluster topology and keep only the replicas, as "addr,master_id" pairs;
# backing up from replicas avoids putting snapshot load on the masters
readonly cluster_topology=$(redis-cli -h redis-cluster cluster nodes)
readonly slaves=$(echo "${cluster_topology}" | grep slave | cut -d' ' -f2,4 | tr ' ' ',')
readonly backup_dir="/opt/redis-backup"
mkdir -p "${backup_dir}"
for slave in ${slaves}; do
    master_id=$(echo "${slave}" | cut -d',' -f2)
    slave_host=$(echo "${slave}" | cut -d',' -f1 | cut -d':' -f1)
    # assumed completion: pull an RDB snapshot from each replica, named after
    # the master it replicates
    redis-cli -h "${slave_host}" --rdb "${backup_dir}/${master_id}.rdb"
done
import static org.junit.Assert.assertThat;
import static org.springframework.kafka.test.hamcrest.KafkaMatchers.hasKey;
import static org.springframework.kafka.test.hamcrest.KafkaMatchers.hasValue;
import static org.springframework.kafka.test.utils.KafkaTestUtils.getSingleRecord;

import java.util.Map;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Deserializer;
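The listing breaks off after the imports. As a minimal sketch of how these helpers are typically wired together inside a test method, the following assumes an EmbeddedKafkaBroker field named embeddedKafka, a topic called test-topic, String keys and values, and the additional DefaultKafkaConsumerFactory and StringDeserializer imports; none of these names come from the original listing.

// build a consumer against the embedded broker (hypothetical names throughout)
Map<String, Object> consumerProps =
        KafkaTestUtils.consumerProps("testGroup", "true", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
Consumer<String, String> consumer =
        new DefaultKafkaConsumerFactory<String, String>(consumerProps,
                new StringDeserializer(), new StringDeserializer()).createConsumer();
embeddedKafka.consumeFromAnEmbeddedTopic(consumer, "test-topic");

// fetch the single record the test expects and assert on its key and value
ConsumerRecord<String, String> record = getSingleRecord(consumer, "test-topic");
assertThat(record, hasKey("sample-key"));
assertThat(record, hasValue("sample-value"));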