Skip to content

Instantly share code, notes, and snippets.

@dcode
Last active September 27, 2017 13:58
Show Gist options
  • Select an option

  • Save dcode/5e58fed4df358983738244ade4d100e3 to your computer and use it in GitHub Desktop.

Select an option

Save dcode/5e58fed4df358983738244ade4d100e3 to your computer and use it in GitHub Desktop.
Patches needed for bro RPM packages for ROCK NSM
diff -u --recursive bro-2.5.orig/aux/plugins/af_packet/cmake/FindKernelHeaders.cmake bro-2.5/aux/plugins/af_packet/cmake/FindKernelHeaders.cmake
--- bro-2.5.orig/aux/plugins/af_packet/cmake/FindKernelHeaders.cmake 2017-05-26 03:15:47.598844347 +0000
+++ bro-2.5/aux/plugins/af_packet/cmake/FindKernelHeaders.cmake 2017-05-26 03:21:44.099990030 +0000
@@ -20,10 +20,18 @@
OUTPUT_STRIP_TRAILING_WHITESPACE
)
+# Admittedly a hack, but in practice we just need *some* headers
+execute_process(
+ COMMAND bash "-c" "find /usr/src/kernels -print | grep -E '/include/linux/user.h' | cut -d/ -f5 | sort -u | tail -1"
+ OUTPUT_VARIABLE KERNEL_FALLBACK
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+)
+
find_path(KERNELHEADERS_ROOT_DIR
NAMES include/linux/user.h
PATHS /usr/src/linux-headers-${KERNEL_RELEASE}
/usr/src/kernels/${KERNEL_RELEASE}
+ /usr/src/kernels/${KERNEL_FALLBACK}
)
include(FindPackageHandleStandardArgs)
@@ -33,4 +41,4 @@
mark_as_advanced(
KERNELHEADERS_ROOT_DIR
-)
\ No newline at end of file
+)
Only in bro-2.5/aux/plugins: .git
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/CMakeLists.txt bro-2.5/aux/plugins/kafka/CMakeLists.txt
--- bro-2.5.orig/aux/plugins/kafka/CMakeLists.txt 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/CMakeLists.txt 2017-05-21 23:49:35.880510471 +0000
@@ -6,7 +6,7 @@
if (LIBRDKAFKA_FOUND AND OPENSSL_FOUND)
include_directories(BEFORE ${LibRDKafka_INCLUDE_DIR} ${OpenSSL_INCLUDE_DIR})
- bro_plugin_begin(BRO KAFKA)
+ bro_plugin_begin(Bro Kafka)
bro_plugin_cc(src/KafkaWriter.cc)
bro_plugin_cc(src/Plugin.cc)
bro_plugin_cc(src/TaggedJSON.cc)
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/README bro-2.5/aux/plugins/kafka/README
--- bro-2.5.orig/aux/plugins/kafka/README 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/README 2017-05-21 15:54:33.805959518 +0000
@@ -1,4 +1,3 @@
-
===============================
Writing Logging Output to Kafka
===============================
@@ -14,10 +13,10 @@
Install librdkafka (https://github.com/edenhill/librdkafka), a native client
library for Kafka. This plugin has been tested against the latest release of
-librdkafka, which at the time of this writing is v0.8.6::
+librdkafka, which at the time of this writing is v0.8.6, v0.9.0, v0.9.1::
- # curl -L https://github.com/edenhill/librdkafka/archive/0.8.6.tar.gz | tar xvz
- # cd librdkafka-0.8.6/
+ # curl -L https://github.com/edenhill/librdkafka/archive/0.9.1.tar.gz | tar xvz
+ # cd librdkafka-0.9.1/
# ./configure
# make
# sudo make install
@@ -48,19 +47,20 @@
.. console::
@load Bro/Kafka/logs-to-kafka.bro
- redef Kafka::logs_to_send = set(Conn::LOG, HTTP::LOG);
+ redef Kafka::include_logs = set(Conn::LOG, HTTP::LOG);
redef Kafka::kafka_conf = table(
["metadata.broker.list"] = "localhost:9092"
);
If all log streams need to be sent to the same topic, define the name of
the topic in a variable called ``topic_name``. In this example, both
-``Conn::LOG`` and ``HTTP::LOG`` will be sent to the topic named ``bro``.
+``DNS::LOG`` and ``HTTP::LOG`` will be excluded from the topic named ``bro``.
+All other Bro logs will be sent, by default.
.. console::
@load Bro/Kafka/logs-to-kafka.bro
- redef Kafka::logs_to_send = set(Conn::LOG, HTTP::LOG);
+ redef Kafka::exclude_logs = set(DNS::LOG, HTTP::LOG);
redef Kafka::kafka_conf = table(
["metadata.broker.list"] = "localhost:9092"
);
@@ -72,6 +72,8 @@
example (look for the $path_func field)
http://blog.bro.org/2012/02/filtering-logs-with-bro.html.
+
+
Settings
--------
@@ -107,11 +109,41 @@
redef Kafka::max_wait_on_shutdown = 3000;
-``tag_json``
+``json_format``
-If true, a log stream identifier is appended to each JSON-formatted message. For
+If set to ``Kafka::JS_DEFAULT``, log events will use the default Bro JSON format.
+
+If set to ``Kafka::JS_TAGGED``, a log stream identifier is appended to each JSON-formatted message. For
example, a Conn::LOG message will look like ``{ 'conn' : { ... }}``.
.. console::
- redef Kafka::tag_json = T;
+ redef Kafka::json_format = Kafka::JS_TAGGED;
+
+``json_timestamps``
+
+Uses the same enum as the Ascii log writer on timestamp format. Default is ``JSON::TS_EPOCH``. Other options
+are ``JSON::TS_MILLIS`` and ``JSON::TS_ISO8601``.
+
+.. console::
+
+ redef Kafka::json_timestamps = JSON::TS_ISO8601;
+
+
+Operationally Useful Example
+------------------------------
+
+.. console::
+
+ @load Bro/Kafka/logs-to-kafka
+
+ # Include all logs by default
+ redef Kafka::kafka_conf = table (
+ ["metadata.broker.list"] = "localhost:9092",
+ ["client.id"] = "bro"
+
+ );
+ redef Kafka::topic_name = "bro";
+ redef Kafka::json_timestamps = JSON::TS_ISO8601;
+
+
Only in bro-2.5/aux/plugins/kafka: README.rst
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/scripts/Bro/Kafka/logs-to-kafka.bro bro-2.5/aux/plugins/kafka/scripts/Bro/Kafka/logs-to-kafka.bro
--- bro-2.5.orig/aux/plugins/kafka/scripts/Bro/Kafka/logs-to-kafka.bro 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/scripts/Bro/Kafka/logs-to-kafka.bro 2017-05-21 15:57:55.910064639 +0000
@@ -3,27 +3,47 @@
module Kafka;
export {
- ##
- ## which log streams should be sent to kafka?
- ## example:
- ## redef Kafka::logs_to_send = set(Conn::Log, HTTP::LOG, DNS::LOG);
- ##
- const logs_to_send: set[Log::ID] &redef;
+ # redefine this in your script to identify the specific logs
+ # that should be sent to Kafka. By default, all will be sent.
+ # for example:
+ #
+ # redef Kafka::include_logs = set(HTTP::LOG, Conn::LOG, DNS::LOG);
+ #
+ # that will send the HTTP, Conn, and DNS logs up to Kafka.
+ #
+ const include_logs: set[Log::ID] &redef;
+
+ # redefine this in your script to identify the logs
+ # that should be excluded from sending to Kafka. By default, all
+ # will be sent.
+ # for example:
+ #
+ # redef Kafka::exclude_logs = set(HTTP::LOG, Conn::LOG, DNS::LOG);
+ #
+ # that will send all except the HTTP, Conn, and DNS logs up to Kafka.
+ #
+ const exclude_logs: set[Log::ID] &redef;
}
event bro_init() &priority=-5
{
for (stream_id in Log::active_streams)
{
- if (stream_id in Kafka::logs_to_send)
- {
- local filter: Log::Filter = [
+ # Skip if `include_logs` is configured and this stream isn't a member
+ if (|include_logs| > 0 && stream_id !in include_logs){
+ next;
+ }
+ # Skip if `exclude_logs` is configured and this stream is a member
+ if (|exclude_logs| > 0 && stream_id in exclude_logs) {
+ next;
+ }
+
+ local filter: Log::Filter = [
$name = fmt("kafka-%s", stream_id),
$writer = Log::WRITER_KAFKAWRITER,
$config = table(["stream_id"] = fmt("%s", stream_id))
- ];
+ ];
- Log::add_filter(stream_id, filter);
- }
+ Log::add_filter(stream_id, filter);
}
}
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/scripts/init.bro bro-2.5/aux/plugins/kafka/scripts/init.bro
--- bro-2.5.orig/aux/plugins/kafka/scripts/init.bro 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/scripts/init.bro 2017-05-21 15:59:42.261848643 +0000
@@ -3,7 +3,24 @@
export {
const topic_name: string = "" &redef;
const max_wait_on_shutdown: count = 3000 &redef;
- const tag_json: bool = F &redef;
+
+ type JSONFormat: enum {
+ ## JSON will be formatted using default Bro JSON formatting with only
+ ## log data as fields
+ ## example:
+ ## { "id.orig_h":"...", }
+ JS_DEFAULT,
+ ## JSON will be formatted with the log path name tagging the log data
+ ## example:
+ ## { "conn": { "id.orig_h": "...", ... }}
+ JS_TAGGED,
+ };
+
+ const json_format: Kafka::JSONFormat = Kafka::JS_DEFAULT &redef;
+ const json_timestamps: JSON::TimestampFormat = JSON::TS_EPOCH &redef;
+
+ ## This table allows you to pass arbitrary configuration options to
+ ## the librdkafka backend configuration interface
const kafka_conf: table[string] of string = table(
["metadata.broker.list"] = "localhost:9092"
) &redef;
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/src/kafka.bif bro-2.5/aux/plugins/kafka/src/kafka.bif
--- bro-2.5.orig/aux/plugins/kafka/src/kafka.bif 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/src/kafka.bif 2017-05-21 16:11:05.451082443 +0000
@@ -3,4 +3,5 @@
const kafka_conf: config;
const topic_name: string;
const max_wait_on_shutdown: count;
-const tag_json: bool;
+const json_format: Kafka::JSONFormat;
+const json_timestamps: JSON::TimestampFormat;
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/src/KafkaWriter.cc bro-2.5/aux/plugins/kafka/src/KafkaWriter.cc
--- bro-2.5.orig/aux/plugins/kafka/src/KafkaWriter.cc 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/src/KafkaWriter.cc 2017-05-22 01:47:48.958728916 +0000
@@ -13,15 +13,22 @@
using namespace logging;
using namespace writer;
-KafkaWriter::KafkaWriter(WriterFrontend* frontend): WriterBackend(frontend), formatter(NULL), rd_producer(NULL)
+KafkaWriter::KafkaWriter(WriterFrontend* frontend):
+ WriterBackend(frontend),
+ formatter(NULL),
+ rd_producer(NULL)
+{
+ topic_name = "";
+
+ InitConfigOptions();
+}
+
+void KafkaWriter::InitConfigOptions()
{
// need thread-local copy of all user-defined settings coming from
// bro scripting land. accessing these is not thread-safe and 'DoInit'
// is potentially accessed from multiple threads.
- // tag_json - thread local copy
- tag_json = BifConst::Kafka::tag_json;
-
// topic name - thread local copy
topic_name.assign(
(const char*)BifConst::Kafka::topic_name->Bytes(),
@@ -44,6 +51,20 @@
Unref(index);
delete k;
}
+
+ ODesc tsfmt;
+ BifConst::Kafka::json_timestamps->Describe(&tsfmt);
+ json_timestamps.assign(
+ (const char*) tsfmt.Bytes(),
+ tsfmt.Len()
+ );
+
+ ODesc jsonfmt;
+ BifConst::Kafka::json_format->Describe(&jsonfmt);
+ json_format.assign(
+ (const char*) jsonfmt.Bytes(),
+ jsonfmt.Len()
+ );
}
KafkaWriter::~KafkaWriter()
@@ -51,18 +72,40 @@
bool KafkaWriter::DoInit(const WriterInfo& info, int num_fields, const threading::Field* const* fields)
{
+ delete formatter;
+ formatter = 0;
+
+ threading::formatter::JSON::TimeFormat tf = threading::formatter::JSON::TS_EPOCH;
+
+ // Format timestamps
+ if ( strcmp(json_timestamps.c_str(), "JSON::TS_EPOCH") == 0 )
+ tf = threading::formatter::JSON::TS_EPOCH;
+ else if ( strcmp(json_timestamps.c_str(), "JSON::TS_MILLIS") == 0 )
+ tf = threading::formatter::JSON::TS_MILLIS;
+ else if ( strcmp(json_timestamps.c_str(), "JSON::TS_ISO8601") == 0 )
+ tf = threading::formatter::JSON::TS_ISO8601;
+ else
+ {
+ Error(Fmt("Invalid JSON timestamp format: %s", json_timestamps.c_str()));
+ return false;
+ }
+
+ // Format JSON
+ if( strcmp(json_format.c_str(), "Kafka::JS_DEFAULT") == 0 )
+ formatter = new threading::formatter::JSON(this, tf);
+ else if( strcmp(json_format.c_str(), "Kafka::JS_TAGGED") == 0 )
+ formatter = new threading::formatter::TaggedJSON(info.path, this, tf);
+ else
+ {
+ Error(Fmt("Invalid JSON format: %s", json_format.c_str()));
+ return false;
+ }
+
// if no global 'topic_name' is defined, use the log stream's 'path'
if(topic_name.empty()) {
topic_name = info.path;
}
- // initialize the formatter
- if(tag_json) {
- formatter = new threading::formatter::TaggedJSON(info.path, this, threading::formatter::JSON::TS_EPOCH);
- } else {
- formatter = new threading::formatter::JSON(this, threading::formatter::JSON::TS_EPOCH);
- }
-
// kafka global configuration
string err;
rd_conf = RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL);
@@ -70,14 +113,14 @@
// apply the user-defined settings to kafka
map<string,string>::iterator i;
for (i = kafka_conf.begin(); i != kafka_conf.end(); ++i) {
- string key = i->first;
- string val = i->second;
+ string key = i->first;
+ string val = i->second;
// apply setting to kafka
- if (RdKafka::Conf::CONF_OK != rd_conf->set(key, val, err)) {
- reporter->Error("Failed to set '%s'='%s': %s", key.c_str(), val.c_str(), err.c_str());
- return false;
- }
+ if (RdKafka::Conf::CONF_OK != rd_conf->set(key, val, err)) {
+ reporter->Error("Failed to set '%s'='%s': %s", key.c_str(), val.c_str(), err.c_str());
+ return false;
+ }
}
// create kafka producer
@@ -161,7 +204,7 @@
/**
* Writer-specific method implementing a change of fthe buffering
- * state. If buffering is disabled, the writer should attempt to
+ * state. If buffering is disabled, the writer should attempt to
* write out information as quickly as possible even if doing so may
* have a performance impact. If enabled (which is the default), it
* may buffer data as helpful and write it out later in a way
diff -ru --no-dereference bro-2.5.orig/aux/plugins/kafka/src/KafkaWriter.h bro-2.5/aux/plugins/kafka/src/KafkaWriter.h
--- bro-2.5.orig/aux/plugins/kafka/src/KafkaWriter.h 2016-11-16 22:53:58.000000000 +0000
+++ bro-2.5/aux/plugins/kafka/src/KafkaWriter.h 2017-05-21 16:10:48.223409474 +0000
@@ -6,6 +6,8 @@
#include <logging/WriterBackend.h>
+#include "kafka.bif.h"
+
namespace RdKafka {
class Conf;
class Producer;
@@ -34,6 +36,7 @@
}
protected:
+
virtual bool DoInit(const WriterBackend::WriterInfo& info, int num_fields, const threading::Field* const* fields);
virtual bool DoWrite(int num_fields, const threading::Field* const* fields, threading::Value** vals);
virtual bool DoSetBuf(bool enabled);
@@ -43,11 +46,16 @@
virtual bool DoHeartbeat(double network_time, double current_time);
private:
+ void InitConfigOptions();
+
static const string default_topic_key;
string stream_id;
string topic_name;
- bool tag_json;
+ string meta_json;
+ bool init_options;
+ string json_format;
+ string json_timestamps;
map<string, string> kafka_conf;
threading::formatter::Formatter *formatter;
--- CMakeLists.txt 2015-02-13 19:35:23.000000000 +0000
+++ CMakeLists.txt 2015-02-21 06:52:53.494337656 +0000
@@ -2,7 +2,7 @@
# When changing the minimum version here, also adapt
# aux/bro-aux/plugin-support/skeleton/CMakeLists.txt
-cmake_minimum_required(VERSION 2.8 FATAL_ERROR)
+cmake_minimum_required(VERSION 2.6.3 FATAL_ERROR)
include(cmake/CommonCMakeConfig.cmake)
--- aux/bro-aux/CMakeLists.txt 2015-02-13 19:35:25.000000000 +0000
+++ aux/bro-aux/CMakeLists.txt 2015-02-21 06:54:24.762338349 +0000
@@ -1,5 +1,5 @@
project(BroAux C CXX)
-cmake_minimum_required(VERSION 2.8 FATAL_ERROR)
+cmake_minimum_required(VERSION 2.6.3 FATAL_ERROR)
include(cmake/CommonCMakeConfig.cmake)
########################################################################
@@ -49,10 +49,11 @@
"${CMAKE_SOURCE_DIR}" STREQUAL "${PROJECT_SOURCE_DIR}")
install(TARGETS ${_target} DESTINATION bin)
else ()
+ get_property(_target_loc TARGET ${_target} PROPERTY LOCATION)
add_custom_target(install-${_target}
COMMAND ${CMAKE_COMMAND} -E make_directory
${CMAKE_INSTALL_PREFIX}/bin
- COMMAND ${CMAKE_COMMAND} -E copy $<TARGET_FILE:${_target}>
+ COMMAND ${CMAKE_COMMAND} -E copy ${_target_loc}
${CMAKE_INSTALL_PREFIX}/bin)
add_dependencies(install-${_target} ${_target})
set(AUX_TARGETS install-${_target};${AUX_TARGETS})
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment