# Get and start Kong and co.
git clone [email protected]:Mashape/docker-kong.git
cd docker-kong/compose
docker-compose up
# Create a Kong API route
server {
    listen 80;
    server_name api-phalcon.aam;
    index index.php;
    root /home/aamsur/workspace/api-phalcon/public;
    access_log /var/log/nginx/api-phalcon.aam.access.log;
    error_log /var/log/nginx/api-phalcon.aam.error.log warn;
# Count requests per last log field for one minute of the access log,
# sorted numerically by hit count (ascending; busiest value last).
# Fixes: dropped the dangling trailing '|' and made the final sort
# numeric (-n) -- a plain sort orders "10" before "9".
tail -50000 /var/log/nginx/chat/access.log \
  | grep '12/Dec/2019:11:13' \
  | awk '{print $NF}' \
  | sort \
  | uniq -c \
  | sort -n
git clone [email protected]:Mashape/docker-kong.git
cd docker-kong/compose
docker-compose up
# List running mining malware.
# Show the full command line (and therefore the script path) of the
# crypto-mining malware process called "kdevtmpfs". The [k] bracket
# trick stops grep from matching its own process entry.
ps -ef | grep '[k]devtmpfs'

# We can also investigate with top, iftop and iotop:
# analyze the CPU load usage
top
# analyze the network device traffic
iftop
# analyze the disk input/output usage
iotop
SELECT table_schema,table_name,column_name as field,ordinal_position,data_type,column_type FROM
(
    SELECT
        table_schema,column_name,ordinal_position,table_name,
        data_type,column_type,COUNT(1) rowcount
    FROM information_schema.columns
    WHERE
    (
        (table_schema='DB1') OR
        (table_schema='DB2')
#!/bin/bash
## check cpu and memory usage linux
## tpk 20190308
thold=60
if [ "$1" = "cpu" ]
then
cores=`cat /proc/cpuinfo | grep processor | wc -l`
usage=`ps axfu | grep -v grep | awk '{sum += $3} END {print sum}'`
#!/bin/bash
## Check overall CPU and memory usage on Linux.
## tpk 20190308

# Number of logical CPU cores: one "processor" line per core in /proc/cpuinfo.
# grep -c replaces the useless `cat | grep | wc -l` chain.
cores=$(grep -c processor /proc/cpuinfo)

# Sum of per-process %CPU (ps column 3); grep -v keeps any grep
# invocations out of the sample, as in the original.
c_usage=$(ps axfu | grep -v grep | awk '{sum += $3} END {print sum}')

# Normalize total %CPU across all cores to a 0-100 scale.
# awk replaces the external bc dependency (2-decimal output preserved).
c=$(awk -v u="$c_usage" -v n="$cores" 'BEGIN {printf "%.2f", u / n}')

# Sum of per-process %MEM (ps column 4).
m_usage=$(ps axfu | grep -v grep | awk '{sum += $4} END {print sum}')
# Save the currently installed PHP-related packages to packages.txt in
# the working directory; tee also echoes the list to stdout.
# Fixes: dropped the trailing '| |' residue, which made the pipeline a
# syntax error (pipe into an empty command).
dpkg -l | grep php | tee packages.txt
# --- docker-compose.yml (fragment) ---
version: '3'
services:
  selenium_hub:
    image: selenium/hub
    ports:
      - "4444:4444"
  emulator:
This is an example configuration that makes nginx output JSON logs, so they are easier for Logstash to process. I was trying to get nginx > Filebeat > Logstash > ES working, and it wasn't until I connected Filebeat directly to Elasticsearch that I saw the expected data. Google led me to `ingest-convert.sh`, and I realized that `filebeat setup` works for Filebeat > ES but not for Filebeat > Logstash > ES. This is because Logstash does not use ingest pipelines by default; you have to enable them in the `elasticsearch` output block.
Having nginx log JSON in the format required by Elasticsearch means there is very little processing (i.e. `grok`) left to do in Logstash. Note that nginx can only output JSON for access logs; the `error_log` format cannot be changed.
Extra fields are output and not used by the Kibana dashboards. I included them in case they might be useful. Since they are not declared in the filebeat setup
, their default is "string" when yo