Install the required packages (Ubuntu 12.04):
apt-get install nfs-kernel-server portmap
Open the exports file:
vim /etc/exports
"""
PoC to trigger the EternalBlue bug (BSOD).

Reference:
- http://blogs.360.cn/360safe/2017/04/17/nsa-eternalblue-smb/
"""
# Stdlib imports first, third-party (impacket) after, per PEP 8 grouping.
import sys
from struct import pack

from impacket import smb
-- Show running queries (PostgreSQL < 9.2).
-- Excludes idle backends (current_query = '<IDLE>') and this query itself.
SELECT procpid, age(query_start, clock_timestamp()), usename, current_query
FROM pg_stat_activity
WHERE current_query != '<IDLE>' AND current_query NOT ILIKE '%pg_stat_activity%'
ORDER BY query_start desc;

-- Show running queries (PostgreSQL >= 9.2).
-- 9.2 renamed procpid -> pid and current_query -> query, and the query column
-- now keeps the last statement text even for idle sessions; idle backends are
-- identified via the new 'state' column, never by a '<IDLE>' query string.
SELECT pid, age(query_start, clock_timestamp()), usename, query
FROM pg_stat_activity
WHERE state <> 'idle' AND query NOT ILIKE '%pg_stat_activity%'
ORDER BY query_start desc;
%% -*- mode: erlang -*-
%% ----------------------------------------------------------------------------
%% RabbitMQ Sample Configuration File.
%%
%% Related doc guide: http://www.rabbitmq.com/configure.html. See
%% http://rabbitmq.com/documentation.html for documentation ToC.
%% ----------------------------------------------------------------------------
[
 {rabbit,
  [%%
Below is a set of best practices that I recommend for most customers. This information is based on my experience helping hundreds of Azure Redis customers investigate various issues.
/*
 * For your reference:
 *
 * SinglyLinkedListNode {
 *     int data;
 *     SinglyLinkedListNode* next;
 * };
 *
 */
SinglyLinkedListNode* insertNodeAtTail(SinglyLinkedListNode* head, int data) {
#include <iostream>
using namespace std;

int main(){
    int a = 3;
    int *p, *q;
    p = &a;
    q = p;
from sys import stdin, stdout
from math import floor

def main():
    global n,m,a
    n,m=[int(x) for x in stdin.readline().split()]
    a=[]
    for i in range(m):
        a.append([int(x) for x in stdin.readline().split()])
    g=[]
    G=[]
Helper setup to edit .yaml files with Vim:
List of general purpose commands for Kubernetes management:
# Generate sample time-series data for a Hive table:
#   /tmp/dates.data           - one timestamp per line (hourly, minute, daily series)
#   /tmp/hive_user_table.data - rows of: timestamp,userNNNN,value
echo "generating sample data for hive table"

# Brace expansion yields "-1hours -2hours ..."; xargs feeds each offset to
# GNU date -d, producing timestamps relative to now.
echo {-1..-181451}hours | xargs -n1 date +"%Y-%m-%d %H:%M:%S" -d >> /tmp/dates.data
echo {-1..-18145}minutes | xargs -n1 date +"%Y-%m-%d %H:%M:%S" -d >> /tmp/dates.data
echo {-1..-1825}days | xargs -n1 date +"%Y-%m-%d %H:%M:%S" -d >> /tmp/dates.data

# Append a random user id (1-10000) and value (1-1000) to every timestamp.
# IFS= and read -r keep each line verbatim (no backslash mangling or trimming);
# quoting "$LINE" prevents word splitting; redirecting from the file replaces
# the useless 'cat | while' pipeline (which also ran the loop in a subshell).
while IFS= read -r LINE ; do
    echo "$LINE,user$((1 + RANDOM % 10000)),$((1 + RANDOM % 1000))" >> /tmp/hive_user_table.data
done < /tmp/dates.data