Created
November 19, 2016 01:13
-
-
Save ASvyatkovskiy/a0d4b49586ee1c9401b566648a4a1280 to your computer and use it in GitHub Desktop.
Example of a stacked glyph superimposed with another glyph in histogrammar-scala with Bokeh
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//Dark Matter with Spark
import org.dianahep.histogrammar._
import org.dianahep.histogrammar.bokeh._
import org.dianahep.histogrammar.sparksql._
// ---------------------------------------------------------------------------
// Flat case classes mirroring the analysis ntuple schema. Fields are `var`s
// (rather than idiomatic `val`s) so rows stay mutable bean-style records for
// the Spark SQL encoder; names, types and order must match the stored columns.
// ---------------------------------------------------------------------------

// Per-event bookkeeping: run/lumi/event identifiers, filter word, weights and
// several MET-like quantities (names suggest PF vs. PUPPI missing-ET and
// "fake" variants — confirm against the ntuple producer).
case class InfoVars(var runNum: Long, var lumiSec: Long, var evtNum: Long, var metfilter: Long, var scale1fb: Double, var evtWeight: Double, var pfmet: Double, var pfmetphi: Double, var puppet: Double, var puppetphi: Double, var fakepfmet: Double, var fakepfmetphi: Double, var fakepuppet: Double, var fakepuppetphi: Double)

// Generator-level vector-boson kinematics.
case class GenEvtInfoVars(var genVPt: Double, var genVPhi: Double)

// Lepton and photon summaries: kinematics (pt, eta, phi[, m]) plus, for
// photons, loose/medium ID multiplicities.
case class MuonVars(var pt: Double, var eta: Double, var phi: Double, var m: Double)
case class ElectronVars(var pt: Double, var eta: Double, var phi: Double, var m: Double)
case class TauVars(var pt: Double, var eta: Double, var phi: Double)
case class PhotonVars(var NLoose: Int, var NMedium: Int, var pt: Double, var eta: Double, var phi: Double)

// Narrow-jet observables: multiplicities, kinematics, b-tag discriminant
// (csv), energy fractions and minimum delta-phi variables.
case class JetVars(var N: Int, var NdR15: Int, var NbtagLdR15: Int, var pt: Double, var eta: Double, var phi: Double, var m: Double, var csv: Double, var CHF: Double, var NHF: Double, var NEMF: Double, var mindPhi: Double, var mindFPhi: Double)

// Large-radius (V) jet observables, including substructure (tau21, tau32),
// soft-drop mass (msd) and subjet b-tag extremes.
case class VJetVars(var N: Int, var pt: Double, var eta: Double, var phi: Double, var m: Double, var csv: Double, var CHF: Double, var NHF: Double, var NEMF: Double, var tau21: Double, var tau32: Double, var msd: Double, var minsubcsv: Double, var maxsubcsv: Double)

// One fully decoded event. Any sub-record may be null when that object group
// is absent in the input, so downstream code filters on `infovars != null`.
case class AllVars(var infovars: InfoVars = null, var genevtinfovars: GenEvtInfoVars = null, var muonvars: MuonVars = null, var electronvars: ElectronVars = null, var tauvars: TauVars = null, var photonvars: PhotonVars = null, var jetvars: JetVars = null, var vjetvars: VJetVars = null)
// Spark SQL entry point (Spark 1.x-style API; `sc` is the shell's SparkContext).
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
import sqlContext.implicits._

// All samples are Parquet directories under a common base path; each is read
// as a typed Dataset[AllVars]. Factoring the load avoids nine copy-pasted
// path strings.
val samplesBasePath = "/user/csuarez/monoxbits"
def loadSample(name: String) = sqlContext.read.load(s"$samplesBasePath/$name").as[AllVars]

val dfMET         = loadSample("MET")
val dfQCD         = loadSample("QCD")
val dfWJetsToLNu  = loadSample("WJetsToLNu")
val dfZJetsToNuNu = loadSample("ZJetsToNuNu")
val dfDYJetsToLL  = loadSample("DYJetsToLL")
val dfGJets       = loadSample("GJets")
val dfSingleTop   = loadSample("SingleTop")
val dfTT          = loadSample("TT")
val dfDiboson     = loadSample("Diboson")
// Project each sample down to its PUPPI-MET value (`infovars.puppet`),
// skipping rows whose info record is absent — AllVars sub-records default to
// null, so the guard is required before dereferencing.
def puppetOf(ds: org.apache.spark.sql.Dataset[AllVars]) =
  ds.filter(_.infovars != null).map(_.infovars.puppet)

val puppetMET         = puppetOf(dfMET)
val puppetQCD         = puppetOf(dfQCD)
val puppetWJetsToLNu  = puppetOf(dfWJetsToLNu)
val puppetZJetsToNuNu = puppetOf(dfZJetsToNuNu)
val puppetDYJetsToLL  = puppetOf(dfDYJetsToLL)
val puppetGJets       = puppetOf(dfGJets)
val puppetSingleTop   = puppetOf(dfSingleTop)
val puppetTT          = puppetOf(dfTT)
val puppetDiboson     = puppetOf(dfDiboson)
// Histogram template: 10 uniform bins over [0, 1000) (presumably GeV — TODO
// confirm units), filling directly with the projected value.
val histo = Histogram(10, 0, 1000, {x: Double => x})

// Fill one histogram per sample with a distributed aggregate: `Increment`
// folds a datum into a partition-local copy of `histo`, `Combine` merges the
// per-partition histograms (histogrammar's Spark aggregation pattern).
def fillHistogram(values: org.apache.spark.sql.Dataset[Double]) =
  values.rdd.aggregate(histo)(new Increment, new Combine)

val final_histogramMET         = fillHistogram(puppetMET)
val final_histogramQCD         = fillHistogram(puppetQCD)
val final_histogramWJetsToLNu  = fillHistogram(puppetWJetsToLNu)
val final_histogramZJetsToNuNu = fillHistogram(puppetZJetsToNuNu)
val final_histogramDYJetsToLL  = fillHistogram(puppetDYJetsToLL)
val final_histogramGJets       = fillHistogram(puppetGJets)
val final_histogramSingleTop   = fillHistogram(puppetSingleTop)
val final_histogramTT          = fillHistogram(puppetTT)
val final_histogramDiboson     = fillHistogram(puppetDiboson)
// Stack the eight simulated-sample histograms (plotted bottom-to-top in the
// order given).
val stack = Stack.build(final_histogramQCD, final_histogramWJetsToLNu, final_histogramZJetsToNuNu, final_histogramDYJetsToLL, final_histogramGJets, final_histogramSingleTop, final_histogramTT, final_histogramDiboson)

import io.continuum.bokeh._

// One filled "histogram" glyph per stacked sample; fill and line share the
// same per-sample colour. List.fill replaces eight repeated literals.
val stackColors = List(Color.Blue, Color.Aqua, Color.Green, Color.Red, Color.Bisque, Color.Yellow, Color.Orange, Color.Brown)
val stack_glyphs = stack.bokeh(
  glyphTypes = List.fill(8)("histogram"),
  glyphSizes = List.fill(8)(1),
  fillColors = stackColors,
  lineColors = stackColors)

// Superimpose the MET-sample histogram as black circle markers, then render
// both to a standalone HTML page.
val met_glyph = final_histogramMET.bokeh(glyphType = "circle", glyphSize = 5, fillColor = Color.Black)
val plot_both = plot(stack_glyphs, met_glyph)
save(plot_both, "result.html")
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment