---
# ^^^ YAML documents must begin with the document separator "---"
#
#### Example docblock, I like to put a descriptive comment at the top of my
#### playbooks.
#
# Overview: Playbook to bootstrap a new host for configuration management.
# Applies to: production
# Description:
#   Ensures that a host is configured for management with Ansible.

import win32serviceutil
import win32service
import win32event
import servicemanager
import socket
import time
import logging

logging.basicConfig(
    filename='c:\\Temp\\hello-service.log',
    level=logging.DEBUG,  # level assumed; the preview cuts off mid-call
)

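The preview ends at the logging setup; a minimal service class that these imports support might look like the following sketch. The service name, log message, and 5-second poll interval are illustrative assumptions, not part of the original snippet.

class HelloService(win32serviceutil.ServiceFramework):
    _svc_name_ = 'HelloService'          # assumed name, for illustration
    _svc_display_name_ = 'Hello Service'

    def __init__(self, args):
        win32serviceutil.ServiceFramework.__init__(self, args)
        # Event that the SCM signals when the service is asked to stop
        self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
        socket.setdefaulttimeout(60)

    def SvcStop(self):
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        win32event.SetEvent(self.hWaitStop)

    def SvcDoRun(self):
        servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE,
                              servicemanager.PYS_SERVICE_STARTED,
                              (self._svc_name_, ''))
        # Wake every 5 seconds until the stop event is signalled
        while win32event.WaitForSingleObject(self.hWaitStop, 5000) == win32event.WAIT_TIMEOUT:
            logging.info('hello')

if __name__ == '__main__':
    win32serviceutil.HandleCommandLine(HelloService)
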
from org.apache.nifi.processors.script import ExecuteScript
from org.apache.nifi.processor.io import InputStreamCallback
from java.io import BufferedReader, InputStreamReader

class ReadFirstLine(InputStreamCallback):
    __line = None

    def __init__(self):
        pass
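    # A possible completion (sketch): the preview stops after __init__.
    # NiFi's InputStreamCallback contract expects a single
    # process(inputStream) method; getLine() is a hypothetical accessor.
    def process(self, inputStream):
        # Wrap the flowfile content stream and keep only the first line
        reader = BufferedReader(InputStreamReader(inputStream))
        try:
            self.__line = reader.readLine()
        finally:
            reader.close()

    def getLine(self):
        return self.__line
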
from pyspark import SparkConf

# Set up spark configuration
conf = SparkConf().setMaster("yarn-client").setAppName("sparK-mer")
#conf = SparkConf().setMaster("local[16]").setAppName("sparK-mer")
# NB: the YARN property uses hyphens, not underscores
conf.set("yarn.nodemanager.resource.cpu-vcores", args.C)
# Saturate with executors
conf.set("spark.executor.instances", executorInstances)
conf.set("spark.executor.heartbeatInterval", "5s")
# cores per executor
conf.set("spark.executor.cores", args.E)
# set driver cores
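A sketch of how this configuration would typically be consumed; the SparkContext construction is standard but not shown in the preview:

from pyspark import SparkContext

sc = SparkContext(conf=conf)
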
%matplotlib inline
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
# sklearn.cross_validation was deprecated and later removed; use model_selection
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.metrics import recall_score
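A minimal end-to-end sketch using these imports; X and y are hypothetical feature/label arrays, and the hyperparameters are illustrative:

X, y = np.random.rand(200, 5), np.random.randint(0, 2, 200)  # stand-in data

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42)

clf = RandomForestClassifier(n_estimators=100, random_state=42)
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)

print(accuracy_score(y_test, y_pred))
print(recall_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
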
#from pyspark.sql import HiveContext
#sqlContext = HiveContext(sc)
query = """
select * from db.sometable where col > 50
"""
results = sqlContext.sql(query)
# DataFrame.write already returns a DataFrameWriter; no constructor needed
results.write.saveAsTable('db.new_table_name', format='parquet',
                          mode='overwrite', path='/path/to/new/data/files')
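Reading the saved table back, as a usage sketch (the table name comes from the snippet above):

new_df = sqlContext.table('db.new_table_name')
new_df.show(5)
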
// Set logging level for spark scala
import org.apache.log4j.{Level, Logger}

Logger.getLogger("org").setLevel(Level.WARN)
Logger.getLogger("akka").setLevel(Level.WARN)
# pyspark --packages com.databricks:spark-avro_2.10:1.0.0
# read avro files from 1.3.0 spark
df = sqlCtx.load("/path/to/my_avro", "com.databricks.spark.avro")
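On modern Spark (2.4+), Avro support is built in via the spark-avro module, so with that package on the classpath the equivalent read is:

df = spark.read.format("avro").load("/path/to/my_avro")
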
import scala.sys.process._
//"ls -la".!!
val result = "ls -la".!!
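Note that .!! returns the command's stdout as a String and throws if the exit code is nonzero. A rough Python parallel, for comparison with the rest of the page:

import subprocess

# check_output likewise returns stdout and raises CalledProcessError on failure
result = subprocess.check_output(["ls", "-la"]).decode()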