@djptek
Created January 7, 2019 16:59
Plot some lines using VEGA in Kibana
#############
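# note: the commands below are intended for the Kibana Dev Tools Console;
# the mapping uses the single "_doc" type of Elasticsearch 6.x, matching the
# 6.5.3 server captured in the sample document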
# tidy up
DELETE benchmarking-results
# create a mapping to mandate single shard + apply date format
PUT benchmarking-results
{
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 0
  },
  "mappings": {
    "_doc": {
      "properties": {
        "@timestamp": {
          "type": "date",
          "format": "yyyy-MM-dd HH:mm:ss Z"
        }
      }
    }
  }
}
# add our sample document
PUT benchmarking-results/_doc/-KxuKGgBUjlJtYwzh8Ww
{
  "@timestamp": "2019-01-07 14:09:18 +0100",
  "event": {
    "description": "Default run",
    "category": "simple",
    "action": "ping",
    "duration": 0.796628,
    "stastistics": {
      "mean": 0.9189119000016944,
      "median": 0.8142949999892153,
      "max": 1.2433289999898989,
      "min": 0.7944230000139214,
      "standard_deviation": 0.17753083375659404
    },
    "repetitions": {
      "warmup": 1,
      "measured": 10
    }
  },
  "agent": {
    "version": "7.0.0.pre",
    "name": "elasticsearch-ruby-client",
    "git": {
      "branch": "benchmarking",
      "sha": "78c88443ef0a01c80d67ff0520bc9b7f1b88ad91",
      "commit_message": "[COMMON] All benchmarking indices have 'benchmarking' prefix",
      "repository": "elasticsearch-ruby"
    },
    "language": {
      "runtime_version": "2.5.1, x86_64-darwin17, x86_64-apple-darwin17.5.0"
    },
    "os": {
      "platform": "2.5.1, x86_64-darwin17, x86_64-apple-darwin17.5.0",
      "type": "darwin",
      "architecture": "x86_64"
    },
    "adapter": "net_http"
  },
  "server": {
    "version": "6.5.3",
    "nodes_info": {
      "_nodes": {
        "total": 1,
        "successful": 1,
        "failed": 0
      },
      "cluster_name": "elasticsearch",
      "nodes": {
        "fmo0iLxISDa2SPtUaJxHfw": {
          "name": "fmo0iLx",
          "transport_address": "127.0.0.1:9300",
          "host": "127.0.0.1",
          "ip": "127.0.0.1",
          "version": "6.5.3",
          "build_flavor": "default",
          "build_type": "tar",
          "build_hash": "159a78a",
          "roles": [
            "master",
            "data",
            "ingest"
          ],
          "attributes": {
            "ml.machine_memory": "17179869184",
            "xpack.installed": "true",
            "ml.max_open_jobs": "20",
            "ml.enabled": "true"
          },
          "os": {
            "refresh_interval_in_millis": 1000,
            "name": "Mac OS X",
            "arch": "x86_64",
            "version": "10.14.1",
            "available_processors": 8,
            "allocated_processors": 8
          }
        }
      }
    }
  }
}
# check the mappings
GET benchmarking-results/_mapping/field/@timestamp
=> "@timestamp" : {
"full_name" : "@timestamp",
"mapping" : {
"@timestamp" : {
"type" : "date",
"format" : "yyyy-MM-dd HH:mm:ss Z"
}
}
}
GET benchmarking-results/_mapping/field/event.stastistics.mean
=> "event.stastistics.mean" : {
"full_name" : "event.stastistics.mean",
"mapping" : {
"mean" : {
"type" : "float"
}
}
}
# query the sample document
GET benchmarking-results/_search
{
  "_source": ["@timestamp","event.action","event.stastistics.mean"]
}
=> "_source" : {
     "@timestamp" : "2019-01-07 14:09:18 +0100",
     "event" : {
       "stastistics" : {
         "mean" : 0.9189119000016944
       },
       "action" : "ping"
     }
   }
# add some more (fake) sample documents
PUT benchmarking-results/_doc/_bulk
{"index":{}}
{"@timestamp":"2019-01-07 14:09:19 +0100","event":{"stastistics":{"mean":0.8189119000016944},"action":"ping"}}
{"index":{}}
{"@timestamp":"2019-01-07 14:09:20 +0100","event":{"stastistics":{"mean":0.7189119000016944},"action":"ping"}}
{"index":{}}
{"@timestamp":"2019-01-07 14:09:19 +0100","event":{"stastistics":{"mean":0.0189119000016944},"action":"pong"}}
{"index":{}}
{"@timestamp":"2019-01-07 14:09:19 +0100","event":{"stastistics":{"mean":0.1189119000016944},"action":"pong"}}
{"index":{}}
{"@timestamp":"2019-01-07 14:09:20 +0100","event":{"stastistics":{"mean":0.2189119000016944},"action":"pong"}}
GET benchmarking-results/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "match": {
            "event.action.keyword": "ping"
          }
        }
      ],
      "filter": {
        "range": {
          "@timestamp": {
            "gte": "2019-01-07 14:09:17 +0100",
            "lte": "2019-01-07 14:09:27 +0100"
          }
        }
      }
    }
  }
}
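# optional extra (not in the original gist): summarise both series in a single
# request, e.g. the average of event.stastistics.mean per event.action value
GET benchmarking-results/_search
{
  "size": 0,
  "aggs": {
    "per_action": {
      "terms": { "field": "event.action.keyword" },
      "aggs": {
        "mean_of_means": { "avg": { "field": "event.stastistics.mean" } }
      }
    }
  }
}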
####
# Now let's try that as a line chart in Vega
# In Kibana => Management => Index Patterns:
# 1. create an index pattern for "benchmarking-results"
# 1.1 in the workflow, ensure that "@timestamp" is selected as the time field
# 2. in Visualize => "+" a new Visualization
# 2.1 select the "Vega" visualization type
# 3. cut and paste the spec below into the Vega editor and click the "Run" icon;
#    you should see a line with a negative gradient based on the "ping" data
{
  $schema: https://vega.github.io/schema/vega-lite/v2.json
  data: {
    # The URL object is a context-aware query to Elasticsearch
    url: {
      # This example hard-codes the index and the time range. Kibana's
      # %context% and %timefield% keys could be used instead, so that the
      # search bar filter and the time picker in the upper right corner
      # are applied to the query before it is sent to Elasticsearch.
      index: benchmarking-results
      body: {
        size: 10000
        _source: ["@timestamp", "event.action.keyword", "event.stastistics.mean"]
        query: {
          bool: {
            must: {
              match: {"event.action.keyword": "ping"}
            },
            filter: {
              range: {
                "@timestamp": {
                  "gte": "2019-01-07 14:09:17 +0100",
                  "lte": "2019-01-07 14:09:27 +0100"
                }
              }
            }
          }
        }
      }
    }
    # We only need the content of the hits.hits array
    format: {property: "hits.hits"}
  }
  # Parse the timestamp into a JavaScript date value
  transform: [
    {calculate: "toDate(datum._source['@timestamp'])", as: "time"}
  ]
  # Draw a line, with x being the time field and y the mean duration statistic
  mark: line
  encoding: {
    x: {field: "time", type: "temporal"}
    y: {field: "_source.event.stastistics.mean", type: "quantitative"}
  }
}
# now edit the "match" clause above, changing "ping" to "pong"
# the gradient should now be increasing, reflecting the alternative set of points
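# a possible variation (not in the original gist): remove the "ping" match from
# the query, add "event.action" to the _source list, and add a colour encoding
# so that both series are drawn as separate lines on the same chart:
#
#   encoding: {
#     x: {field: "time", type: "temporal"}
#     y: {field: "_source.event.stastistics.mean", type: "quantitative"}
#     color: {field: "_source.event.action", type: "nominal"}
#   }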