英題:Fluentd Hacking Guide
30分しかないため斜線部分は今回省く
- Fluentd の起動シーケンスとプラグインの読み込み
- Fluentd の設定ファイルのパース
- Input Plugin から Output Plugin にデータが渡る流れ
from deployment.cuisine import * | |
from fabric.api import * | |
from fabric.context_managers import * | |
from fabric.utils import puts | |
from fabric.colors import red, green | |
import simplejson | |
import os |
from flask import Flask, render_template_string, request | |
class CustomFlask(Flask): | |
jinja_options = Flask.jinja_options.copy() | |
jinja_options.update(dict( | |
block_start_string='<%', | |
block_end_string='%>', | |
variable_start_string='%%', | |
variable_end_string='%%', | |
comment_start_string='<#', |
import org.apache.spark.{AccumulableParam, SparkConf} | |
import org.apache.spark.serializer.JavaSerializer | |
import scala.collection.mutable.{ HashMap => MutableHashMap } | |
/* | |
* Allows a mutable HashMap[String, Int] to be used as an accumulator in Spark. | |
* Whenever we try to put (k, v2) into an accumulator that already contains (k, v1), the result | |
* will be a HashMap containing (k, v1 + v2). | |
* | |
* Would have been nice to extend GrowableAccumulableParam instead of redefining everything, but it's |
#include "FluentLogger.h" | |
FluentLogger logger("192.168.10.100"); | |
// for fluentd | |
void send_fluentd() | |
{ | |
uMP mp(64); | |
mp.init(); | |
mp.set_map(3); // 3 items |
#!/bin/bash | |
init_fs01bu() | |
{ | |
usb_modeswitch -t <<EOF | |
DefaultVendor= 0x$1 | |
DefaultProduct= 0x$2 | |
TargetVendor= 0x$3 | |
TargetProduct= 0x$4 | |
MessageEndpoint= not set | |
MessageContent="55534243123456780000000080000606f50402527000000000000000000000" |