- Get source
$ go get -u github.com/cendhu/fetch-block
- Get govendor
$ go get -u github.com/kardianos/govendor
- build
{ | |
"id": "iperf3-test", | |
"description": "iperf3 network test", | |
"labels": {}, | |
"run": { | |
"cpus": 1, | |
"mem": 128, | |
"disk": 0, | |
"env": { | |
"IPERF_MODE": "client", |
{ | |
"datapoints":[ | |
{"name":"cpus.limit","value":1.1,"unit":"count","timestamp":"2017-07-20T02:00:34.11517831Z","tags":{"container_id":"e6c0b967-fa45-42f1-8b2c-d979f1ddf8bf","executor_id":"ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7","executor_name":"Command Executor (Task: ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7) (Command: sh -c '\necho \"$CEPH...')","framework_id":"ab856169-0aa1-4d11-ae56-0da524f8b70b-0012","source":"ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7"}}, | |
{"name":"cpus.throttled.time","value":0,"unit":"seconds","timestamp":"2017-07-20T02:02:34.412170697Z","tags":{"container_id":"e6c0b967-fa45-42f1-8b2c-d979f1ddf8bf","executor_id":"ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7","executor_name":"Command Executor (Task: ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7) (Command: sh -c '\necho \"$CEPH...')","framework_id":"ab856169-0aa1-4d11-ae56-0da524f8b70b-0012","source":"ceph.mon.598f9b7e-3931-4b39-86e7-1b4607001ea7"} }, | |
{"name":"net.tx.errors","value":0,"unit":"count","timesta |
{ | |
"id": "/drake/volumetest", | |
"backoffFactor": 1.15, | |
"backoffSeconds": 1, | |
"constraints": [ | |
[ | |
"hostname", | |
"LIKE", | |
"192.10.250.234" | |
] |
tadpole_1 | 2017-12-01 08:38:59,033 ERROR - TadpoleSystem_UserQuery : saveLoginHistory save login history | |
tadpole_1 | com.ibatis.common.jdbc.exception.NestedSQLException: | |
tadpole_1 | --- The error occurred in com/hangum/tadpole/engine/query/internal/system/Tadpole-System-Common.xml. | |
tadpole_1 | --- The error occurred while executing update. | |
tadpole_1 | --- Check the INSERT INTO login_history(user_seq, login_ip, succes_yn, fail_reason) VALUES (?, ?, ?, ?); . | |
tadpole_1 | --- Check the SQL Statement (preparation failed). | |
tadpole_1 | --- Cause: java.sql.SQLException: [SQLITE_ERROR] SQL error or missing database (no such table: login_history) | |
tadpole_1 | at com.ibatis.sqlmap.engine.mapping.statement.MappedStatement.executeUpdate(MappedStatement.java:107) | |
tadpole_1 | at com.ibatis.sqlmap.engine.impl.SqlMapExecutorDelegate.insert(SqlMapExecutorDelegate.java:393) | |
tadpole_1 | at com.ibatis.sqlmap.engine.impl.SqlMapSessionImpl.insert(SqlMapSessionImpl.java:82) |
{ | |
"id": "/mw/memory-stat", | |
"cpus": 1, | |
"mem": 2048, | |
"instances": 1, | |
"container": { | |
"type": "DOCKER", | |
"docker": { | |
"forcePullImage": true, | |
"image": "harbor.ajway.kr/middleware/mw:1.0", |
For my own self-education purposes
To be able to use custom endpoints with the latest Spark distribution, one needs to add an external package (hadoop-aws). Then, custom endpoints can be configured according to the docs.
bin/spark-shell --packages org.apache.hadoop:hadoop-aws:2.7.2
--- | |
apiVersion: apps/v1beta2 | |
kind: Deployment | |
metadata: | |
name: guestbook-back | |
spec: | |
replicas: 1 | |
selector: | |
matchLabels: | |
type: app |
apiVersion: networking.k8s.io/v1beta1 | |
kind: Ingress | |
metadata: | |
name: kubernetes-dashboard | |
namespace: kube-system | |
spec: | |
rules: | |
- host: dashboard.192.168.100.12.nip.io | |
http: | |
paths: |