$ sudo su - postgres
$ createuser <username>
ref: http://www.postgresql.org/docs/9.5/static/app-createuser.html
:root { | |
--c-apricot: #E2B49A; | |
--c-aquapoise: #7BD4CC; | |
--c-army: #7B895B; | |
--c-blackcherry: #39324B; | |
--c-blacksand: #3F3931; | |
--c-blush: #EABCAC; | |
--c-bondiblue: #2A93D5; | |
--c-borabora: #8BCBC8; | |
--c-botanica: #3F4234; |
$ sudo su - postgres
$ createuser <username>
ref: http://www.postgresql.org/docs/9.5/static/app-createuser.html
# Escape single quotes for embedding in a SQL string literal:
# each ' becomes '' (e.g. "it's" -> "it''s").
#
# Params:
#   $1 -- string to escape
# Outputs: escaped string on stdout
escape_quote() {
  # printf (not echo) so backslashes and a leading '-n' survive intact;
  # "$1" quoted so spaces/globs in the input are preserved.
  printf '%s\n' "$1" | sed "s/'/''/g"
}
# Get commands to insert data to MySQL.
#
# Params:
#   $1 -- table to insert data to
#   $2 -- number of fields
#   $3 -- data file (fields delimited by '\1', also note that '\1' after last
# configure hadoop: fetch the HDP manual-install helper files and unpack them.
wget http://public-repo-1.hortonworks.com/HDP/tools/2.0.6.0/hdp_manual_install_rpm_helper_files-2.0.6.76.tar.gz
tar xf hdp_manual_install_rpm_helper_files-2.0.6.76.tar.gz
# TODO: continue hadoop configuration here.
# Hugepage setup parameters: user allowed to use hugepages, total memory
# budget (32 GiB), and the hugepage size (2 MiB).
tlbuser=clay
totalgb=$(( 32*1024*1024*1024 ))
pgsize=$(( 2048*1024 ))
sysctl=/etc/sysctl.conf
limits=/etc/security/limits.conf

# If kernel.shmmax is already present in sysctl.conf, assume hugepages
# have been configured previously and bail out early.
configured=$(grep kernel.shmmax "$sysctl")
if [ -n "$configured" ]; then
  echo 'hugepage is configured'
  exit
fi
# Deploy Storm 0.9.0.1 to every Salt minion: push the tarball, unpack it
# as the hadoop user, install the config, and prepare its state directory.
sudo salt '*' cp.get_file salt://storm-0.9.0.1.tar.gz /tmp/storm-0.9.0.1.tar.gz
sudo salt '*' cmd.retcode 'tar zxf /tmp/storm-0.9.0.1.tar.gz -C /home/hadoop/apps/' 'runas=hadoop'
sudo salt '*' cp.get_file salt://storm.yaml /home/hadoop/apps/storm-0.9.0.1/conf/storm.yaml
sudo salt '*' file.chown /home/hadoop/apps/storm-0.9.0.1/conf/storm.yaml hadoop hadoop
sudo salt '*' file.mkdir /var/lib/storm
sudo salt '*' file.chown /var/lib/storm hadoop hadoop
# install storm native libraries
sudo salt '*' cp.get_file salt://zeromq-2.1.7.tar.gz /tmp/zeromq-2.1.7.tar.gz
sudo salt '*' cmd.retcode 'tar zxf /tmp/zeromq-2.1.7.tar.gz -C /tmp'
# Create Solr's home directory in HDFS (owned by the solr user),
# then bootstrap Solr.
sudo -u hdfs hadoop fs -mkdir /solr
sudo -u hdfs hadoop fs -chown solr /solr
solrctl init
# Enable the Hadoop HDFS/YARN/MapReduce services at boot.
for svc in \
  hadoop-hdfs-namenode \
  hadoop-hdfs-datanode \
  hadoop-yarn-resourcemanager \
  hadoop-yarn-nodemanager \
  hadoop-mapreduce-historyserver; do
  sudo chkconfig "$svc" on
done
# Register the Oracle JDK with the alternatives system. The very high
# priority (200000) makes these entries win over any distro-packaged JDK.
JDK_VERSION=1.7.0_45
sudo alternatives --install /usr/bin/java java "/usr/java/jdk${JDK_VERSION}/jre/bin/java" 200000
sudo alternatives --install /usr/bin/javaws javaws "/usr/java/jdk${JDK_VERSION}/jre/bin/javaws" 200000
# Browser plugin, 32- and 64-bit variants.
sudo alternatives --install /usr/lib/mozilla/plugins/libjavaplugin.so libjavaplugin.so "/usr/java/jdk${JDK_VERSION}/jre/lib/i386/libnpjp2.so" 200000
sudo alternatives --install /usr/lib64/mozilla/plugins/libjavaplugin.so libjavaplugin.so.x86_64 "/usr/java/jdk${JDK_VERSION}/jre/lib/amd64/libnpjp2.so" 200000
# Development tools and SDK/JRE directory links.
sudo alternatives --install /usr/bin/javac javac "/usr/java/jdk${JDK_VERSION}/bin/javac" 200000
sudo alternatives --install /usr/bin/jar jar "/usr/java/jdk${JDK_VERSION}/bin/jar" 200000
sudo alternatives --install /usr/lib/jvm/java-1.7.0 java_sdk_1.7.0 "/usr/java/jdk${JDK_VERSION}" 200000
sudo alternatives --install /usr/lib/jvm/jre-1.7.0 jre_1.7.0 "/usr/java/jdk${JDK_VERSION}/jre" 200000
# A code snippet that solves Exercise 3.3.1(b) of *Mining of Massive Datasets*.
def permute(items): | |
"""Iterate all permutations of a list of items.""" | |
length = len(items) | |
iternum = reduce(lambda x, y: x * y, range(1, length + 1)) | |
for i in range(iternum): | |
digs = fac_base_digits(i) |