Simple collection of Groovy scripts to help me maintain some Jenkins systems.
See also https://wiki.jenkins-ci.org/display/JENKINS/Jenkins+Script+Console
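These snippets are meant to be pasted into "Manage Jenkins > Script Console" (the /script page); most of them assume the console's default bindings and package imports (e.g. hudson.model.*). A quick sanity check that the console works and which controller you are talking to, as a one-liner:

println Jenkins.getVersion()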
// Licensed under MIT
// author : Damien Nozay
// for all builds from build-flow-plugin whose parameters include a GIT_BRANCH parameter,
// change the displayName to include branch and build number
import com.cloudbees.plugins.flow.*;
jobs = Jenkins.instance.getAllItems(BuildFlow);
jobs.each { job ->
    job.builds.each { b ->
        GIT_BRANCH = b.envVars['GIT_BRANCH']
        // skip builds that do not carry the parameter
        if (!GIT_BRANCH) { return }
        (GIT_BRANCH =~ /(?:refs\/remotes\/)?(.+)/).each { full, branch ->
            b.displayName = branch + ' (#' + b.number + ')'
        }
    }
}
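
// A variant sketch (not from the original): rename only the last build of a single flow job,
// using the non-deprecated Run.getEnvironment(TaskListener) API.
// 'folder/my-flow' is a hypothetical job path; adjust it for your instance.
import hudson.model.TaskListener
def job = Jenkins.instance.getItemByFullName('folder/my-flow')
def b = job.lastBuild
def branch = b.getEnvironment(TaskListener.NULL)['GIT_BRANCH']?.replaceFirst(/^refs\/remotes\//, '')
if (branch) {
    b.displayName = "${branch} (#${b.number})"
}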
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script goes through all the jobs and checks their disk usage.
// It prints some disk usage stats as well as the retention policy.
// ("worstCase" = number of builds x size of the largest build.)
// ---------------------------------------------------------
// e.g.:
//
// JOB: playground/test-python
//  -> lastbuild: #1 = FAILURE, time: 2015-02-12T20:56:16Z
//  -> builds=12, average=8 KB, max=9 KB, total=97 KB, worstCase=113 KB
//
import hudson.plugins.disk_usage.BuildDiskUsageAction
import hudson.plugins.disk_usage.DiskUsageUtil

def printDiscarder(job) {
    d = job.buildDiscarder
    if (d) {
        println(" -> keep: builds=(${d.daysToKeep} days, ${d.numToKeep} total); artifacts=(${d.artifactDaysToKeep} days, ${d.artifactNumToKeep} total)")
    } else {
        println(" -> no retention policy.")
    }
}

def getDiskUsage(build) {
    usage = null
    build.getTransientActions().each { action ->
        if (action instanceof BuildDiskUsageAction) {
            // println action.buildUsageString
            // println action.allDiskUsage
            usage = action
        }
    }
    return usage
}

def printDiskUsage(job) {
    maxUsage = 0
    totalUsage = 0
    numBuilds = 0
    job.builds.each { build ->
        usage = getDiskUsage(build)
        // skip builds the disk-usage plugin has no data for
        if (usage == null) { return }
        if (usage.allDiskUsage > maxUsage) { maxUsage = usage.allDiskUsage }
        totalUsage += usage.allDiskUsage
        numBuilds += 1
        // println(" * build ${build.number} - ${usage.buildUsageString}")
    }
    averageUsage = 0
    if (numBuilds) { averageUsage = (totalUsage / numBuilds).longValue() }
    worstCase = numBuilds * maxUsage
    println(" -> builds=${numBuilds}, average=${DiskUsageUtil.getSizeString(averageUsage)}, max=${DiskUsageUtil.getSizeString(maxUsage)}, total=${DiskUsageUtil.getSizeString(totalUsage)}, worstCase=${DiskUsageUtil.getSizeString(worstCase)}")
}

jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    numbuilds = j.builds.size()
    println 'JOB: ' + j.fullName
    if (numbuilds == 0) {
        println ' -> no build'
    } else {
        lastbuild = j.lastBuild
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
    }
    printDiscarder(j)
    printDiskUsage(j)
    println ''
}
''
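
// A follow-up sketch (not from the original script): rank the ten largest jobs by total recorded
// build disk usage, reusing the same BuildDiskUsageAction data as above.
import hudson.plugins.disk_usage.BuildDiskUsageAction
import hudson.plugins.disk_usage.DiskUsageUtil
totals = [:]
Jenkins.instance.getAllItems(hudson.model.AbstractProject).each { j ->
    long sum = 0
    j.builds.each { b ->
        b.getTransientActions().each { a ->
            if (a instanceof BuildDiskUsageAction) { sum += (a.allDiskUsage ?: 0) }
        }
    }
    totals[j.fullName] = sum
}
totals.sort { -it.value }.take(10).each { name, size ->
    println "${name}: ${DiskUsageUtil.getSizeString(size)}"
}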
// author : Ahmed Mubbashir Khan
// ---------------------------------------------------------
// This script goes through all the jobs in a view, filters successful and failed jobs separately,
// then prints them out along with the time they took.
// ---------------------------------------------------------
import hudson.model.*
def str_view = "Pipeline Tests"
def view = Hudson.instance.getView(str_view)
def successfulJobs = view.getItems().findAll { job -> job.lastBuild != null && job.lastBuild.result == hudson.model.Result.SUCCESS }
def failedJobs = view.getItems().findAll { job -> job.lastBuild != null && job.lastBuild.result == hudson.model.Result.FAILURE }
def disabledJobs = view.getItems().findAll { job -> job.disabled == true }
def enabledJobs = view.getItems().findAll { job -> job.disabled != true }
println "Total jobs: " + view.getItems().size() + " Successful: " + successfulJobs.size() +
    " Failed: " + failedJobs.size() + " Enabled jobs: " + enabledJobs.size() + " Disabled jobs: " + disabledJobs.size()
println "Currently successful jobs:"
successfulJobs.each { job -> printInfo(job) }
println "Currently failed jobs:"
failedJobs.each { job -> printInfo(job) }
println "Currently disabled jobs:"
disabledJobs.each { job -> printInfo(job) }
println "Currently enabled jobs:"
enabledJobs.each { job -> printInfo(job) }
def printInfo(job) {
    println "Job: ${job.name} built on ${job.getAssignedLabelString()}, " +
        "took ${job.lastBuild?.getDurationString() ?: 'n/a'} to build, is disabled: ${job.disabled}"
}
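
// A small follow-up sketch (assumes the same 'view' binding as above): list the currently failing
// jobs in that view, slowest last build first.
view.getItems()
    .findAll { it.lastBuild?.result == hudson.model.Result.FAILURE }
    .sort { -it.lastBuild.duration }
    .each { println "${it.name}: ${it.lastBuild.durationString}" }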
// author : Ahmed Mubbashir Khan
// Licensed under MIT
// ---------------------------------------------------------
// This script prints information about the last downstream builds triggered by a given upstream job build.
// e.g. printInformationOfDownstreamJobs("ChangeListner", 11, "All Tests")
// will print all the downstream jobs invoked by ChangeListner build 11 in the view "All Tests"
// ---------------------------------------------------------
import hudson.model.*
//printInformationOfDownstreamJobs("ChangeListner", 11, "All Tests")
def printInformationOfDownstreamJobs(jobName, buildnumber, viewName) {
    def build = Jenkins.getInstance().getItemByFullName(jobName).getBuildByNumber(buildnumber)
    println "${build.fullDisplayName} ${build.getCause(hudson.model.Cause.UpstreamCause)?.upstreamRun}"
    def cause_pattern = /.*${jobName}.*${buildnumber}.*/
    println "Cause pattern: ${cause_pattern}"
    def view = Hudson.instance.getView(viewName)
    def jobsByCause = view.getItems().findAll { job ->
        job.lastBuild != null &&
        job.lastBuild.getCause(hudson.model.Cause.UpstreamCause) != null &&
        job.lastBuild.getCause(hudson.model.Cause.UpstreamCause).upstreamRun ==~ cause_pattern
    }
    jobsByCause.each { job ->
        println("Build: ${job.lastBuild.fullDisplayName} -> " +
            "result: ${job.lastBuild.result} -> ${job.lastBuild.buildStatusSummary.message}, " +
            "(was triggered by: ${job.lastBuild.getCause(hudson.model.Cause.UpstreamCause).upstreamRun})")
    }
}
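
// An alternative matching sketch (not from the original): compare the UpstreamCause fields directly
// instead of running a regex over the upstreamRun string.
def isTriggeredBy(job, String upstreamName, int upstreamNumber) {
    def cause = job.lastBuild?.getCause(hudson.model.Cause.UpstreamCause)
    return cause != null && cause.upstreamProject == upstreamName && cause.upstreamBuild == upstreamNumber
}
// e.g. Hudson.instance.getView("All Tests").getItems().findAll { isTriggeredBy(it, "ChangeListner", 11) }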
// Licensed under MIT
// author : Damien Nozay
// scan all jobs and check if the last build was aborted (e.g. for maintenance),
// and output the abort cause (user) and timestamp.
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    if (j.builds.size() == 0) { return }
    lastbuild = j.lastBuild
    if (lastbuild.result == Result.ABORTED) {
        println 'JOB: ' + j.fullName
        // not every aborted build carries an InterruptedBuildAction, so stay null-safe
        causes = lastbuild.getAction(InterruptedBuildAction)?.getCauses()
        abortCause = causes ? causes[0] : null
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', cause: ' + (abortCause?.shortDescription ?: 'unknown') + ', time: ' + lastbuild.timestampString2
    }
}
''
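
// A small add-on sketch for the loop above: when the cause is a UserInterruption, the aborting
// user can be pulled out explicitly. The getUser() accessor is an assumption here; the
// shortDescription used above already contains the user name if it does not resolve.
import jenkins.model.CauseOfInterruption.UserInterruption
def describeAbort(cause) {
    if (cause instanceof UserInterruption) {
        return "aborted by ${cause.user}"
    }
    return cause?.shortDescription ?: 'unknown cause'
}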
// Licensed under MIT
// author : Damien Nozay
// list jobs and their last build.
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    println 'JOB: ' + j.fullName
    if (j.builds.size() == 0) {
        println ' -> no build'
        return
    }
    lastbuild = j.lastBuild
    println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
}
// returns blank
''
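
// Sketch: the same listing as above, but one comma-separated line per job, which is easier to
// paste into a spreadsheet. Uses only core Jenkins APIs.
Jenkins.instance.getAllItems(hudson.model.Job).each { j ->
    def b = j.lastBuild
    println([j.fullName, b?.displayName, b?.result, b?.time].join(','))
}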
// Licensed under MIT
// author : Ahmed Mubbashir Khan
// ---------------------------------------------------------
// This script goes through all the jobs and checks whether their configured SCM is hudson.scm.NullSCM;
// if it is, it prints the job's info.
// ---------------------------------------------------------
counter = 0
jobs = Jenkins.instance.getAllItems(hudson.model.AbstractProject)
for (job in jobs) {
    if (job.scm instanceof hudson.scm.NullSCM) {
        println "Job #${counter++}: '${job.name}' scm: '${job.scm}'"
    }
}
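
// A related sketch: instead of only listing NullSCM jobs, count jobs per SCM type.
Jenkins.instance.getAllItems(hudson.model.AbstractProject)
    .countBy { it.scm?.class?.simpleName }
    .each { type, count -> println "${type}: ${count}" }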
// Licensed under MIT
// author : Damien Nozay
// list jobs not run in the last N days / last N months
import groovy.time.TimeCategory
use (TimeCategory) {
    // e.g. find jobs not run in the last 3 months
    sometimeago = (new Date() - 3.months)
}
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    numbuilds = j.builds.size()
    if (numbuilds == 0) {
        println 'JOB: ' + j.fullName
        println ' -> no build'
        return
    }
    lastbuild = j.lastBuild
    if (lastbuild.timestamp.getTime() < sometimeago) {
        println 'JOB: ' + j.fullName
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
    }
}
''
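
// A follow-up sketch (destructive; review the list above before using): disable freestyle jobs whose
// last build is older than the cutoff. The disable() call is commented out on purpose.
import groovy.time.TimeCategory
def cutoff
use (TimeCategory) { cutoff = new Date() - 3.months }
Jenkins.instance.getAllItems(hudson.model.AbstractProject).each { j ->
    def last = j.lastBuild
    if (last != null && last.time < cutoff) {
        println "would disable: ${j.fullName} (last build ${last.timestampString2})"
        // j.disable()
    }
}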
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script cleans a subdir in all existing workspaces for a selected job.
// node -> check workspace (concurrent too) -> check subdir -> delete
// ---------------------------------------------------------
import hudson.slaves.WorkspaceList
job = Jenkins.instance.getItemByFullName('SomeJobFolder/myJob')
subdir = 'dist'
println "Looking for job: " + job.fullName
// concurrent builds get workspaces suffixed with this combinator (e.g. "@2")
combinator = System.getProperty(WorkspaceList.class.getName(), "@");
for (node in Jenkins.instance.getNodes()) {
    println "Node: '" + node.getSelfLabel().toString() + "' (" + node.getAssignedLabels().join(",") + ")"
    workspacePathBase = node.getWorkspaceFor(job)
    // handle concurrent workspaces; they are suffixed ("ws", "ws@2", "ws@3", ...)
    for (int i = 1; ; i++) {
        workspacePath = (i == 1) ? workspacePathBase : workspacePathBase.withSuffix(combinator + i);
        // stop checking once a suffix does not exist (unlikely to have a higher one)
        if (!workspacePath.exists()) {
            break;
        } else {
            println " * found workspace: " + workspacePath.getRemote()
            targetDir = workspacePath.child(subdir)
            if (targetDir.exists()) {
                println " * found target directory"
                if (!job.isBuilding()) {
                    println " * removing directory (job is not building)"
                    targetDir.deleteRecursive()
                } else {
                    println " * not removing directory (job is building)"
                }
            }
        }
    }
}
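
// The loop above only visits agents returned by getNodes(); a minimal sketch (same 'job' and
// 'subdir' bindings as above) for also checking the built-in node's workspace:
controllerWorkspace = Jenkins.instance.getWorkspaceFor(job)
controllerTarget = controllerWorkspace.child(subdir)
if (controllerTarget.exists() && !job.isBuilding()) {
    println " * removing " + controllerTarget.getRemote() + " on the built-in node"
    controllerTarget.deleteRecursive()
}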
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// Retroactively add badges to promoted builds.
// ---------------------------------------------------------
import hudson.plugins.promoted_builds.*;
import org.jvnet.hudson.plugins.groovypostbuild.*;
// customize these
project_name = "project/full/name"
promotion_name = "promotion_process_name"
// look up promoted builds for project + promotion process.
project = Jenkins.instance.getItemByFullName(project_name)
action = project.getAction(PromotedProjectAction.class)
promotion = action.getProcess(promotion_name)
promoted_builds = action.getPromotions(promotion)
// check this other gist:
// https://gist.github.com/dnozay/fc528b43cf27755017cc
def add_release_version(promo_build) {
    target = promo_build.target;
    // access the promotion build environment and the RELEASE_VERSION parameter.
    release_version = promo_build.environment.get('RELEASE_VERSION');
    // create the summary with the gold star icon and attach it to the target build.
    GroovyPostbuildSummaryAction action = new GroovyPostbuildSummaryAction("star-gold.png");
    target.getActions().add(action);
    // customize text for the summary.
    action.appendText("RELEASE VERSION = " + release_version, false);
    // also add a short text that will appear in the build history
    target.getActions().add(GroovyPostbuildAction.createShortText(release_version));
    // save build
    target.save();
}
// do stuff; e.g. add release version in the description.
for (Promotion promo : promoted_builds) {
    add_release_version(promo)
}
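
// Dry-run sketch (same bindings as above): before running the badge loop, list what would be
// touched without mutating anything.
for (Promotion promo : promoted_builds) {
    println "${promo.target.fullDisplayName} -> RELEASE_VERSION=${promo.environment.get('RELEASE_VERSION')}"
}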
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script goes through all the jobs and checks if they are using the Groovy Postbuild plugin;
// if they are, it computes the script hash and checks it against the approved ones.
// ---------------------------------------------------------
import org.jenkinsci.plugins.scriptsecurity.scripts.*;
import org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.*;
import hudson.model.RootAction;
import org.jvnet.hudson.plugins.groovypostbuild.*;
import java.security.MessageDigest;
// instance containing the approvals
// list of approved hashes: println instance.approvedScriptHashes
ScriptApproval instance = Jenkins.getInstance().getExtensionList(RootAction.class).get(ScriptApproval.class);
approvedScriptHashes = instance.approvedScriptHashes
// mirrors the script-security hashing scheme: SHA-1 over "<language>:<script>"
def hash(String script, String language) {
    MessageDigest digest = MessageDigest.getInstance("SHA-1");
    digest.update(language.getBytes("UTF-8"));
    digest.update((byte) ':');
    digest.update(script.getBytes("UTF-8"));
    return Util.toHexString(digest.digest());
}
jobs = hudson.model.Hudson.instance.getAllItems(FreeStyleProject)
for (job in jobs) {
    for (publisher in job.publishersList) {
        if (publisher instanceof GroovyPostbuildRecorder) {
            hashval = hash(publisher.script.script, "groovy")
            println "#" * 80
            println "job: " + job.getFullName()
            println "script hash: " + hashval
            println "approved: " + (hashval in approvedScriptHashes)
            println "script: "
            println "-" * 80
            println publisher.script.script
            println "#" * 80
        }
    }
}
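
// Quick interactive check reusing the hash() helper and approvedScriptHashes binding defined above:
// is a given inline script already approved?
candidate = '''println "hello from groovy postbuild"'''
println "approved: " + (hash(candidate, "groovy") in approvedScriptHashes)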