Skip to content

Instantly share code, notes, and snippets.

View gbraccialli's full-sized avatar

Gui Braccialli gbraccialli

View GitHub Profile
# Ask Ambari (REST API) to start service $1 on $cluster, then block until
# the service reports state STARTED.
# Relies on $user, $pass, $host and $cluster being set by the caller, and
# on a sibling wait() helper defined elsewhere in this script.
# Fix: quote all expansions so credentials/hosts containing spaces or
# glob characters do not word-split or expand.
function startWait(){
curl -s -u "$user:$pass" -H 'X-Requested-By: ambari' -X PUT -d \
'{"RequestInfo": {"context" :"Start '"$1"' via REST"}, "Body": {"ServiceInfo": {"state": "STARTED"}}}' \
"http://$host/api/v1/clusters/$cluster/services/$1"
wait "$1" "STARTED"
}
function stopWait(){
curl -s -u $user:$pass -H 'X-Requested-By: ambari' -X PUT -d \
'{"RequestInfo": {"context" :"Stop '"$1"' via REST"}, "Body": {"ServiceInfo": {"state": "INSTALLED"}}}' \
import sys
import requests
import json
import argparse
import pprint
import time
import datetime
import pysolr
# NOTE(review): "rm" presumably the YARN ResourceManager host used for the
# REST calls below — confirm against the code that consumes this value.
rmServer = "seregiondev01.cloud.hortonworks.com"
-- Create the hive user for local and remote (hostxxx) connections.
-- NOTE(review): passwords are stored in plain text in this script — rotate
-- before use.
CREATE USER 'hive'@'localhost' IDENTIFIED BY 'pwd';
CREATE USER 'hive'@'hostxxx' IDENTIFIED BY 'pwd';
-- Grant the hive user full rights on the `hive` database only.
-- NOTE(review): grants use 'hostxx' while the user above is 'hostxxx' —
-- confirm the intended hostname; as written the grant targets a different
-- account.
GRANT ALL ON hive.* TO 'hive'@'localhost';
GRANT ALL ON hive.* TO 'hive'@'hostxx';
flush privileges;
-- Allow root to connect from any host ('%') with full privileges,
-- including the ability to grant them onward.
CREATE USER 'root'@'%';
GRANT ALL PRIVILEGES ON *.* to 'root'@'%' WITH GRANT OPTION;
-- Set the password for the remote root account and for the current
-- session's account. PASSWORD() is deprecated in MySQL 5.7+ — works on
-- older servers this script targets.
SET PASSWORD FOR 'root'@'%' = PASSWORD('hortonworks');
SET PASSWORD = PASSWORD('hortonworks');
using System;
using System.Net;
using System.Collections.Generic;
using Newtonsoft.Json;
using System.Text;
namespace ConsoleApplication1
{
class PhoenixSample
{
//spark-shell --packages sramirez:spark-infotheoretic-feature-selection:1.1,sramirez:spark-MDLP-discretization:1.0
import org.apache.spark.SparkContext._
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.mllib.feature._
import org.apache.spark.mllib.feature.MDLPDiscretizer
import org.apache.spark.mllib.classification.{LogisticRegressionWithLBFGS, LogisticRegressionModel}
import org.apache.spark.mllib.evaluation.MulticlassMetrics
#!/bin/sh -
# Abort unless running with real UID 0: the installer must be able to run
# commands as root.
if [ "$(id -ru)" != 0 ]; then
{
printf 'Error: this installer needs the ability to run commands as root.\n'
printf 'Install as root or with sudo\n'
} >&2
exit 1
fi
my_disable_thp() {
( cat > /usr/local/sbin/ambari-thp-disable.sh <<-'EOF'
#export below to allow nohup
#export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Djline.terminal=jline.UnsupportedTerminal"
# Open a beeline session against the local HiveServer2 (port 10000) as
# user "admin", forwarding any extra command-line arguments to beeline.
beeline -u "jdbc:hive2://localhost:10000" -n admin "$@"
#export PHOENIX=`find /usr/hdp/current/phoenix-client/lib/ -name "*.jar" | xargs ls -m | tr -d '\n'`
#echo $PHOENIX
# Export a comma-delimited list of every jar found under directory $2 into
# the environment variable named by $1 (see the PHOENIX example above).
# Fixes: quote "$1"/"$2" so paths with spaces survive, single-quote the
# find pattern so the shell never globs it, and use $(...) over backticks.
export "$1"="$(find "$2" -name '*.jar' | xargs ls -m | tr -d '\n')"
# Kill every YARN application currently reported by `yarn application -list`.
# The id list is captured to a temp file first so the kill loop does not
# race with the list command's own output.
# Fixes: `read -r` so ids are taken literally (no backslash processing),
# and "$LINE" quoted against word-splitting.
yarn application -list | grep application_ | cut -f1 > tmpyarnkill.txt
while read -r LINE
do
echo "killing $LINE"
yarn application -kill "$LINE"
done < tmpyarnkill.txt
rm tmpyarnkill.txt
# Launch Phoenix sqlline with the connect string
# localhost:2181:/hbase-unsecure (local ZooKeeper, unsecured HBase znode),
# forwarding any extra command-line arguments to sqlline.
/usr/hdp/current/phoenix-client/bin/sqlline.py localhost:2181:/hbase-unsecure "$@"