-
-
Save maxandersen/2088692be9e6f76a325b2c8c64a2ad9a to your computer and use it in GitHub Desktop.
Maven Project Statistics
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
///usr/bin/env jbang "$0" "$@" ; exit $?
//JDK 17+
//DEPS io.quarkus:quarkus-bom:2.4.1.Final@pom
//DEPS io.quarkus:quarkus-picocli
//DEPS io.quarkus:quarkus-jackson
//DEPS io.quarkus:quarkus-smallrye-context-propagation
//DEPS com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.12.5
//DEPS com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.12.5
//DEPS org.zeroturnaround:zt-exec:1.12
//DEPS commons-io:commons-io:2.11.0
//Q:CONFIG quarkus.banner.enabled=false
//Q:CONFIG quarkus.log.level=WARN
//Q:CONFIG quarkus.log.console.format=%d{HH:mm:ss} %-5p %s%e%n
//Q:CONFIG quarkus.log.category."cli".level=${logging.level:INFO}
//Q:CONFIG quarkus.log.console.stderr=${logging.stderr:true}
//TODO find out why these properties do not work if set as Q:CONFIG
//JAVA_OPTIONS -Dmavenstats.parallel.enabled=${parallel.enabled:true}
//JAVA_OPTIONS -Dmavenstats.parallel.max-async=${parallel.max-async:0}
//JAVA_OPTIONS -Dmavenstats.parallel.max-queue=${parallel.max-queue:2048}
/*
Example use:
stats from:
- quarkus project
- only tags 2.4.x.Final (--tag-regex=...)
- only analyze modules: (--module-regex=...)
  * core/...
  * extensions/...
- reuse previously generated tag information (--no-override-existing-release)

j! -Dlogging.level=DEBUG mavenstats.java \
  --tag-regex='2\.4\..*\.Final' \
  --module-regex='(core|extensions)/.*' \
  --no-override-existing-release \
  --maven-settings=/usr/local/Cellar/maven/3.8.2/libexec/conf/settings.xml \
  --output=quarkus \
  ~/Projects/Upstream/quarkus
*/
import com.fasterxml.jackson.annotation.JsonInclude; | |
import com.fasterxml.jackson.annotation.JsonInclude.Include; | |
import com.fasterxml.jackson.databind.JsonNode; | |
import com.fasterxml.jackson.databind.ObjectMapper; | |
import com.fasterxml.jackson.databind.SerializationFeature; | |
import com.fasterxml.jackson.databind.cfg.MapperBuilder; | |
import com.fasterxml.jackson.databind.json.JsonMapper; | |
import com.fasterxml.jackson.dataformat.xml.XmlMapper; | |
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; | |
import io.quarkus.arc.log.LoggerName; | |
import io.quarkus.runtime.Quarkus; | |
import io.smallrye.config.ConfigMapping; | |
import java.io.IOException; | |
import java.io.OutputStream; | |
import java.nio.file.Files; | |
import java.nio.file.OpenOption; | |
import java.nio.file.Path; | |
import java.nio.file.StandardOpenOption; | |
import java.time.OffsetDateTime; | |
import java.util.ArrayList; | |
import java.util.Arrays; | |
import java.util.Comparator; | |
import java.util.HashMap; | |
import java.util.List; | |
import java.util.Map; | |
import java.util.Objects; | |
import java.util.concurrent.Callable; | |
import java.util.concurrent.ExecutionException; | |
import java.util.function.Consumer; | |
import java.util.regex.Pattern; | |
import java.util.stream.Collectors; | |
import java.util.stream.StreamSupport; | |
import javax.enterprise.context.ApplicationScoped; | |
import javax.enterprise.inject.Produces; | |
import javax.inject.Inject; | |
import javax.inject.Named; | |
import org.apache.commons.io.FilenameUtils; | |
import org.eclipse.microprofile.context.ManagedExecutor; | |
import org.jboss.logging.BasicLogger; | |
import org.jboss.logging.Logger; | |
import org.jboss.logging.Logger.Level; | |
import org.zeroturnaround.exec.ProcessExecutor; | |
import org.zeroturnaround.exec.stream.LogOutputStream; | |
import picocli.CommandLine; | |
/**
 * app configuration
 *
 * Bound to the "mavenstats.*" system properties set through the
 * //JAVA_OPTIONS lines at the top of this script.
 */
@ConfigMapping(prefix = "mavenstats")
interface MavenStatsConfig {
    /** Settings controlling parallel per-module analysis (mavenstats.parallel.*). */
    MavenStatsParallelExecutionConfig parallel();
}
/** Parallel-execution tuning knobs (mavenstats.parallel.*). */
interface MavenStatsParallelExecutionConfig {
    /** Whether modules are analyzed concurrently. */
    boolean enabled();
    /** Max concurrent tasks; values <= 0 select a CPU-based default (see ManagedExecutorFactory). */
    int maxAsync();
    /** Max number of queued tasks for the executor. */
    int maxQueue();
}
/** | |
* CLI: receives, parses and validates user supplied parameters | |
*/ | |
@CommandLine.Command(mixinStandardHelpOptions = true, name = "mavenstats", version = "0.4.2") | |
public class mavenstats implements Runnable { | |
@LoggerName("cli.main") | |
Logger logger; | |
@CommandLine.Option( | |
names = {"-b", "--main-branch"}, paramLabel = "branch-name", defaultValue = "main", | |
description = "Main branch name (default: ${DEFAULT-VALUE})") | |
String mainBranch; | |
@CommandLine.Option( | |
names = {"--tag-regex"}, paramLabel = "tag-regex", defaultValue = "", | |
description = "Regular expression to filter tags (default: ${DEFAULT-VALUE})") | |
String tagRegex; | |
@CommandLine.Option( | |
names = {"--module-regex"}, paramLabel = "module-regex", defaultValue = "", | |
description = "Regular expression to filter list of modules (default: ${DEFAULT-VALUE})") | |
String moduleRegex; | |
@CommandLine.Option( | |
names = {"--maven-settings"}, paramLabel = "settings.xml", | |
description = "Use a different maven settings file") | |
Path mavenSettingsFile; | |
@CommandLine.Option( | |
names = {"-o", "--output"}, paramLabel = "output-dir", | |
description = "Folder to write output files (default: standard output)") | |
Path outputDir; | |
@CommandLine.Option( | |
names = {"--exclude-dependency-stats"}, | |
description = "Exclude stats about dependencies (default: false)") | |
boolean excludeDependencyStats; | |
@CommandLine.Option( | |
names = {"--exclude-file-stats"}, | |
description = "Exclude stats about files (default: false)") | |
boolean excludeFileStats; | |
@CommandLine.Option( | |
names = {"-E", "--no-each", | |
"--no-save-each-release"}, negatable = true, defaultValue = "true", | |
description = "Output a JSON file on each release. Only if output-dir set. (default: true)") | |
boolean saveEachRelease; | |
@CommandLine.Option( | |
names = {"-A", "--no-all", "--no-save-all-releases"}, negatable = true, defaultValue = "true", | |
description = "Output a JSON file with all releases (default: true)") | |
boolean saveAllReleases; | |
@CommandLine.Option( | |
names = {"--no-override-existing-release"}, negatable = true, defaultValue = "true", | |
description = "Override a previously generated JSON release file (if negated, load previous instead) (default: true)") | |
boolean overrideExistingRelease; | |
@CommandLine.Parameters( | |
index = "0", paramLabel = "project-dir", | |
description = "The root folder of the project") | |
Path projectDir; | |
@Inject | |
ProcessManager proc; | |
@Inject | |
MavenStatsAnalyzer analyzer; | |
@Override | |
public void run() { | |
// some validations to avoid possible mistakes | |
logger.debugf("project path: %s", projectDir); | |
if (!Files.isDirectory(projectDir)) { | |
logger.fatalf("project-dir is not a valid directory: %s", projectDir); | |
Quarkus.asyncExit(1); | |
return; | |
} | |
if (!Files.isDirectory(projectDir.resolve(".git"))) { | |
logger.fatalf("project doesn't contain a git repository"); | |
Quarkus.asyncExit(1); | |
return; | |
} | |
if (!proc.output(projectDir, "git", "status", "--porcelain=v1").trim().isEmpty()) { | |
logger.fatalf("project contains git uncommitted changes"); | |
Quarkus.asyncExit(1); | |
return; | |
} | |
if (outputDir != null) { | |
if (!Files.exists(outputDir)) { | |
try { | |
Files.createDirectories(outputDir); | |
} catch (IOException e) { | |
logger.fatalf("I/O error creating output directory: %s", e.getMessage()); | |
Quarkus.asyncExit(1); | |
return; | |
} | |
} | |
if (!Files.isDirectory(outputDir)) { | |
logger.fatalf("output-dir is not a valid directory: %s", projectDir); | |
Quarkus.asyncExit(1); | |
return; | |
} | |
} | |
analyzer.runAnalysis( | |
new Parameters( | |
mainBranch, tagRegex, moduleRegex, mavenSettingsFile, | |
!excludeDependencyStats, !excludeFileStats, | |
saveEachRelease, saveAllReleases, overrideExistingRelease, | |
outputDir, projectDir)); | |
} | |
} | |
/**
 * User supplied parameters
 *
 * Immutable snapshot of the validated CLI options, handed from the command
 * class to the analyzer. Note: the include* flags are the negation of the
 * CLI's exclude* options.
 */
record Parameters(
    String mainBranch, String tagRegex,
    String moduleRegex, Path mavenSettingsFile,
    boolean includeDependencyStats, boolean includeFileStats,
    boolean saveEachRelease, boolean saveAllReleases, boolean overrideExistingRelease,
    Path outputDir, Path projectDir) {
}
/** | |
* Maven project stats | |
*/ | |
@ApplicationScoped | |
class MavenStatsAnalyzer { | |
public static final String COMMIT_DATE_REGEX = "(?m)^Date:\\s+(.*)$"; | |
public static final Pattern COMMIT_DATE_PATTERN = Pattern.compile(COMMIT_DATE_REGEX); | |
public static final String COMMIT_AUTHOR_REGEX = "(?m)^Author:\\s+(.*)\\s+<(.*)>$"; | |
public static final Pattern COMMIT_AUTHOR_PATTERN = Pattern.compile(COMMIT_AUTHOR_REGEX); | |
@LoggerName("cli.mavenstatsanalyzer") | |
Logger logger; | |
@Inject | |
ProcessManager proc; | |
@Inject | |
@Named("xmlMapper") | |
Mapper xml; | |
@Inject | |
@Named("jsonMapper") | |
Mapper json; | |
@Inject | |
@Named("cpuBoundExecutor") | |
ManagedExecutor executor; | |
@Inject | |
MavenStatsConfig config; | |
public void runAnalysis(Parameters params) { | |
logger.debugf("project analysis starting with configuration:"); | |
json.log(Level.DEBUG, params); | |
final var projectHistory = analyzeProjectHistory(params); | |
logger.debugf("project analysis completed"); | |
if (params.saveAllReleases()) { | |
if (params.outputDir() != null) { | |
json.write( | |
projectHistory, | |
params.outputDir().resolve("releases.json"), | |
StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); | |
} else { | |
json.write( | |
projectHistory, | |
System.out); | |
} | |
} | |
} | |
private ProjectHistoryInfo analyzeProjectHistory(Parameters params) { | |
// switch to the main branch before pulling the list of tags | |
if (!proc.output(params.projectDir(), "git", "branch", "--show-current") | |
.trim() | |
.equals(params.mainBranch())) { | |
logger.debugf("project not in main branch. switching to: %s", params.mainBranch()); | |
proc.run(params.projectDir(), "git", "checkout", params.mainBranch(), "--quiet"); | |
} else { | |
logger.debugf("project already in main branch: %s", params.mainBranch()); | |
} | |
final var fullTagList = Arrays.asList( | |
proc.output(params.projectDir(), "git", "tag", "--list").split("\\s+")); | |
final var selectedTagList = fullTagList.stream() | |
.filter(tag -> params.tagRegex().isEmpty() || tag.matches(params.tagRegex())) | |
.sorted(TagComparator.INSTANCE) | |
.toList(); | |
logger.debugf("number of tags: %d (of %d)", selectedTagList.size(), fullTagList.size()); | |
logger.debug(selectedTagList); | |
// go over each tag detected | |
final var releases = new ArrayList<ProjectInfo>(); | |
for (String tag : selectedTagList) { | |
try { | |
final var outputReleaseFile = params.outputDir() != null | |
? params.outputDir().resolve("release.%s.json".formatted(tag.replaceAll("[/: \\t]", "_"))) | |
: null; | |
final ProjectInfo projectInfo; | |
if (!params.overrideExistingRelease() | |
&& outputReleaseFile != null && Files.isRegularFile(outputReleaseFile)) { | |
logger.debugf("loading already existing tag: %s", tag); | |
projectInfo = json.readAs(ProjectInfo.class, outputReleaseFile, StandardOpenOption.READ); | |
} else { | |
projectInfo = analyzeProjectTag(params, tag); | |
if (params.saveEachRelease() && params.outputDir() != null) { | |
json.write( | |
projectInfo, | |
params.outputDir().resolve("release.%s.json".formatted(tag)), | |
StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); | |
} | |
} | |
releases.add(projectInfo); | |
} catch (RuntimeException e) { | |
logger.warnf("skipping tag: %s, unexpected exception: %s", tag, e.getMessage()); | |
} | |
} | |
return new ProjectHistoryInfo( | |
selectedTagList.size(), | |
releases); | |
} | |
private ProjectInfo analyzeProjectTag(Parameters params, String tag) { | |
logger.infof("checking out tag: %s", tag); | |
proc.run(params.projectDir(), "git", "checkout", tag, "--quiet"); | |
return analyzeProjectWorkingTree(params, tag); | |
} | |
private ProjectInfo analyzeProjectWorkingTree(Parameters params, String releaseTag) { | |
logger.debugf("querying last commit"); | |
final var commitContent = proc.output( | |
params.projectDir(), "git", "log", "HEAD~1..HEAD", "--date=iso8601-strict"); | |
final var commitDateMatcher = COMMIT_DATE_PATTERN.matcher(commitContent); | |
final var commitDate = commitDateMatcher.find() | |
? OffsetDateTime.parse(commitDateMatcher.group(1)) | |
: null; | |
logger.debugf("commit date: %s", commitDate); | |
final var commitAuthorMatcher = COMMIT_AUTHOR_PATTERN.matcher(commitContent); | |
final String commitAuthorName; | |
final String commitAuthorEmail; | |
if (commitAuthorMatcher.find()) { | |
commitAuthorName = commitAuthorMatcher.group(1); | |
commitAuthorEmail = commitAuthorMatcher.group(2); | |
} else { | |
commitAuthorName = null; | |
commitAuthorEmail = null; | |
} | |
logger.debugf("commit author: %s <%s>", commitAuthorName, commitAuthorEmail); | |
// find modules | |
final var fullModuleList = | |
extractFullModuleList(params.projectDir().resolve("pom.xml")); | |
final var selectedModuleList = fullModuleList.stream() | |
.filter(module -> params.moduleRegex().isEmpty() || module.matches(params.moduleRegex())) | |
.toList(); | |
logger.debugf("number of modules: %d (of %d)", selectedModuleList.size(), | |
fullModuleList.size()); | |
final List<ModuleInfo> moduleInfoList; | |
if (config.parallel().enabled()) { | |
try { | |
moduleInfoList = executor.invokeAll( | |
selectedModuleList.stream() | |
.map( | |
module -> ((Callable<ModuleInfo>) (() -> analyzeProjectModule(params, module)))) | |
.toList()) | |
.stream() | |
.map(moduleInfoF -> { | |
try { | |
return moduleInfoF.get(); | |
} catch (InterruptedException | ExecutionException e) { | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
}).toList(); | |
} catch (InterruptedException | RuntimeException e) { | |
logger.warnf("unexpected interruption: %s", e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} else { | |
moduleInfoList = selectedModuleList.stream() | |
.map(module -> analyzeProjectModule(params, module)) | |
.toList(); | |
} | |
final var totalAnalyze = moduleInfoList.stream() | |
.mapToLong(info -> info.analyzeDuration()).sum(); | |
final var uniqueDependencyCount = (params.includeDependencyStats()) | |
? moduleInfoList.stream() | |
.filter(info -> info.dependencies() != null) | |
.flatMap(info -> info.dependencies().stream()) | |
.collect(Collectors.toSet()) | |
.size() | |
: null; | |
final var fileCount = (params.includeFileStats()) | |
? moduleInfoList.stream() | |
.filter(info -> info.fileCount() != null) | |
.mapToLong(ModuleInfo::fileCount) | |
.sum() | |
: null; | |
final var fileCountByExtension = (params.includeFileStats()) | |
? moduleInfoList.stream() | |
.map(ModuleInfo::fileCountByExtension) | |
.filter(Objects::nonNull) | |
.reduce( | |
new HashMap<>(), | |
(a, b) -> { | |
final var result = new HashMap<>(a); | |
b.forEach((ext, count) -> result.merge(ext, count, Long::sum)); | |
return result; | |
}) | |
: null; | |
return new ProjectInfo( | |
releaseTag, | |
commitDate, | |
commitAuthorName, | |
commitAuthorEmail, | |
uniqueDependencyCount, | |
fileCount, | |
fileCountByExtension, | |
selectedModuleList.size(), | |
moduleInfoList, | |
totalAnalyze); | |
} | |
private ModuleInfo analyzeProjectModule(Parameters params, String module) { | |
logger.debugf("analyzing module: %s", module); | |
try { | |
var startTime = System.currentTimeMillis(); | |
final List<DependencyInfo> dependencies; | |
if (params.includeDependencyStats()) { | |
final var effectivePomPath = Files.createTempFile( | |
"mavenstats.%s.".formatted(module.replaceAll("[/: \\t]", "_")), | |
".effective-pom.xml"); | |
final var command = new ArrayList<>(List.of("mvn", "help:effective-pom")); | |
if (params.mavenSettingsFile() != null) | |
command.add("--settings=%s".formatted(params.mavenSettingsFile())); | |
command.addAll( | |
List.of( | |
"--quiet", | |
"--projects=%s".formatted(module), | |
"-Doutput=%s".formatted(effectivePomPath))); | |
proc.run(params.projectDir(), command); | |
final var projectNode = xml.read( | |
effectivePomPath, | |
StandardOpenOption.READ, | |
StandardOpenOption.DELETE_ON_CLOSE); | |
dependencies = extractDependencies(projectNode); | |
logger.debugf("module: %s, dependencies: %d", module, dependencies.size()); | |
} else { | |
dependencies = null; | |
} | |
Long fileCount; | |
final Map<String, Long> fileCountByExtension; | |
if (params.includeFileStats()) { | |
fileCountByExtension = Files.find( | |
params.projectDir().resolve(module), | |
Integer.MAX_VALUE, | |
(file, attrs) -> attrs.isRegularFile()) | |
.collect( | |
Collectors.groupingBy( | |
path -> FilenameUtils.getExtension(path.toString()), | |
Collectors.counting())); | |
fileCount = fileCountByExtension.values().stream().mapToLong(Long::longValue).sum(); | |
logger.debugf("module: %s, files: %d", module, fileCount); | |
} else { | |
fileCountByExtension = null; | |
fileCount = null; | |
} | |
long duration = (System.currentTimeMillis() - startTime) / 1000; | |
return new ModuleInfo( | |
module, | |
dependencies != null ? dependencies.size() : null, | |
dependencies, | |
fileCount, | |
fileCountByExtension, | |
duration | |
); | |
} catch (IOException e) { | |
logger.warnf("unexpected I/O exception: %s", e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
private List<DependencyInfo> extractDependencies(JsonNode projectNode) { | |
final var dependenciesNode = projectNode.path("dependencies").path("dependency"); | |
if (dependenciesNode.isMissingNode() || !dependenciesNode.isArray()) { | |
return List.of(); | |
} | |
return StreamSupport.stream(dependenciesNode.spliterator(), false) | |
.map(dependencyNode -> new DependencyInfo( | |
dependencyNode.path("groupId").textValue(), | |
dependencyNode.path("artifactId").textValue(), | |
dependencyNode.path("version").textValue(), | |
dependencyNode.path("scope").textValue())) | |
.toList(); | |
} | |
private List<String> extractFullModuleList(Path parentPom) { | |
final var modules = extractModuleList(parentPom); | |
return modules != null ? modules : List.of(); | |
} | |
private List<String> extractModuleList(Path pom) { | |
final var projectNode = xml.read(pom, StandardOpenOption.READ); | |
// is this a pom? | |
final var packaging = projectNode.path("packaging").textValue(); | |
if (!"pom".equals(packaging)) { | |
return null; // leaf node | |
} | |
final var modulesNode = projectNode.path("modules").path("module"); | |
if (!modulesNode.isArray()) { | |
return List.of(); // unexpected, but no leaves | |
} | |
final var modules = | |
StreamSupport.stream(modulesNode.spliterator(), false) | |
.map(JsonNode::textValue) | |
.toList(); | |
final var result = new ArrayList<String>(); | |
for (final var module : modules) { | |
final var submodules = | |
extractModuleList(pom.getParent().resolve(module).resolve("pom.xml")); | |
if (submodules != null) { | |
result.addAll( | |
submodules.stream() | |
.map(submodule -> "%s/%s".formatted(module, submodule)) | |
.toList()); | |
} else { | |
result.add(module); | |
} | |
} | |
return result; | |
} | |
} | |
/**
 * Orders version-like tags (e.g. "2.4.10.Final") by comparing their
 * dot-separated parts pairwise; a null tag sorts before any non-null tag.
 */
class TagComparator implements Comparator<String> {

    /** Shared stateless instance (now final: the field was mutable before). */
    public static final TagComparator INSTANCE = new TagComparator();

    @Override
    public int compare(String tag1, String tag2) {
        // covers both-null and identical-content cheaply
        if (Objects.equals(tag1, tag2)) {
            return 0;
        }
        // Arrays.compare treats a null array as less than any non-null array
        return Arrays.compare(
            (tag1 != null) ? tag1.split("\\.") : null,
            (tag2 != null) ? tag2.split("\\.") : null,
            TagPartComparator.INSTANCE);
    }
}

/**
 * Orders single tag parts: numerically when both parts parse as integers
 * (so "10" sorts after "9"), lexicographically otherwise; a null part
 * sorts first.
 */
class TagPartComparator implements Comparator<String> {

    /** Shared stateless instance (now final: the field was mutable before). */
    public static final TagPartComparator INSTANCE = new TagPartComparator();

    @Override
    public int compare(String part1, String part2) {
        //noinspection StringEquality
        if (part1 == part2) {
            return 0;
        }
        if (part1 == null || part2 == null) {
            return part1 == null ? -1 : 1;
        }
        try {
            // numeric comparison whenever both parts are integers
            return Integer.compare(Integer.parseInt(part1), Integer.parseInt(part2));
        } catch (NumberFormatException e) {
            // non-numeric parts (e.g. "Final") fall back to lexicographic order
            return part1.compareTo(part2);
        }
    }
}
/** Aggregated analysis result: number of selected tags plus the analyzed releases. */
@JsonInclude(Include.NON_NULL)
record ProjectHistoryInfo(
    int tagCount,
    List<ProjectInfo> releases
) {
}
/** Per-release statistics; null fields are omitted from the JSON output. */
@JsonInclude(Include.NON_NULL)
record ProjectInfo(
    String releaseTag,
    OffsetDateTime releaseDate,             // date of the tag's last commit
    String authorName,                      // author of the tag's last commit
    String authorEmail,
    Integer uniqueDependencyCount,          // null when dependency stats were excluded
    Long fileCount,                         // null when file stats were excluded
    Map<String, Long> fileCountByExtension, // null when file stats were excluded
    int moduleCount,
    List<ModuleInfo> modules,
    Long totalAnalyzeDuration               // sum of per-module durations (seconds)
) {
}
/** Per-module statistics; null fields are omitted from the JSON output. */
@JsonInclude(Include.NON_NULL)
record ModuleInfo(
    String module,                          // module path relative to the project root
    Integer dependencyCount,                // null when dependency stats were excluded
    List<DependencyInfo> dependencies,      // null when dependency stats were excluded
    Long fileCount,                         // null when file stats were excluded
    Map<String, Long> fileCountByExtension, // null when file stats were excluded
    long analyzeDuration                    // analysis wall time in seconds
) {
}
/** A single maven dependency coordinate taken from a module's effective pom. */
@JsonInclude(Include.NON_NULL)
record DependencyInfo(
    String groupId,
    String artifactId,
    String version,
    String scope
) {
}
/**
 * Executor factory used to run system jobs
 *
 * Produces the CPU-bound ManagedExecutor used for the parallel per-module
 * analysis, sized from the mavenstats.parallel.* configuration.
 */
@SuppressWarnings("unused")
@ApplicationScoped
class ManagedExecutorFactory {

    @LoggerName("cli.managedexecutorfactory")
    Logger logger;

    @Inject
    MavenStatsConfig config;

    /**
     * Builds the executor: max-async <= 0 selects a default of 2/3 of the
     * available processors; max-queue bounds the task backlog.
     */
    @Produces
    @Named("cpuBoundExecutor")
    public ManagedExecutor executor() {
        if (config != null) {
            // NOTE(review): when max-async > 0 this takes the LARGER of the
            // configured value and 2*processors, so a small user-supplied cap
            // is silently ignored — Math.min (a cap) may have been intended;
            // confirm the intent before changing
            final int parallelMaxAsyncResolved = config.parallel().maxAsync() <= 0
                ? (Runtime.getRuntime().availableProcessors() * 2 / 3)
                : Math.max(config.parallel().maxAsync(), Runtime.getRuntime().availableProcessors() * 2);
            logger.debugf(
                "cpu bound executor max async size: %d, max queue size: %d",
                parallelMaxAsyncResolved, config.parallel().maxQueue());
            return ManagedExecutor.builder()
                .maxAsync(parallelMaxAsyncResolved)
                .maxQueued(config.parallel().maxQueue())
                .build();
        }
        throw new IllegalStateException("no configuration received yet");
    }
}
/** | |
* XML and JSON I/O with Jackson Mappers | |
*/ | |
@SuppressWarnings("unused") | |
@ApplicationScoped | |
class ObjectMapperFactory { | |
@LoggerName("cli.objectmapperfactory") | |
Logger logger; | |
private ObjectMapper jsonObjectMapper() { | |
return objectMapper(JsonMapper.builder()); | |
} | |
@Produces | |
@ApplicationScoped | |
@Named("jsonMapper") | |
public Mapper jsonMapper() { | |
return new Mapper(jsonObjectMapper(), logger); | |
} | |
private ObjectMapper xmlObjectMapper() { | |
return objectMapper(XmlMapper.builder()); | |
} | |
@Produces | |
@ApplicationScoped | |
@Named("xmlMapper") | |
public Mapper xmlMapper() { | |
return new Mapper(xmlObjectMapper(), logger); | |
} | |
private <M extends ObjectMapper, B extends MapperBuilder<M, B>> M objectMapper(B builder) { | |
final var mapper = builder | |
.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) | |
.enable(SerializationFeature.INDENT_OUTPUT) | |
.build(); | |
mapper.registerModule(new JavaTimeModule()); | |
return mapper; | |
} | |
} | |
/** | |
* Jackson ObjectMapper wrapper with I/O extension methods | |
*/ | |
@SuppressWarnings("ClassCanBeRecord") | |
class Mapper { | |
private final ObjectMapper objectMapper; | |
private final Logger logger; | |
@Inject | |
Mapper(ObjectMapper objectMapper, Logger logger) { | |
this.objectMapper = objectMapper; | |
this.logger = logger; | |
} | |
public JsonNode read(Path file, OpenOption... options) { | |
try (final var in = Files.newInputStream(file, options)) { | |
return objectMapper.readTree(in); | |
} catch (IOException e) { | |
logger.warnf( | |
"unexpected I/O exception when reading: %s. message: %s", file, e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
public <T> T readAs(Class<T> type, Path file, OpenOption... options) { | |
try (final var in = Files.newInputStream(file, options)) { | |
return objectMapper.readValue(in, type); | |
} catch (IOException e) { | |
logger.warnf( | |
"unexpected I/O exception when reading: %s. message: %s", file, e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
public void write(Object node, Path file, OpenOption... options) { | |
try (final var out = Files.newOutputStream(file, options)) { | |
write(node, out); | |
} catch (IOException e) { | |
logger.warnf( | |
"unexpected I/O exception when writing: %s. message: %s", file, e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
public void write(Object node, OutputStream out) { | |
try { | |
objectMapper.writeValue(out, node); | |
} catch (IOException e) { | |
logger.warnf("unexpected I/O exception when writing. message: %s", e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
public void log(Level level, Object node) { | |
try (final var out = new JBossLogOutputStream(logger, level)) { | |
write(node, out); | |
} catch (IOException e) { | |
logger.warnf("unexpected I/O exception when logging. message: %s", e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
} | |
/** | |
* System process management | |
*/ | |
@SuppressWarnings("unused") | |
@ApplicationScoped | |
class ProcessManager { | |
@LoggerName("cli.procmanager") | |
Logger logger; | |
public void run(Path workingDir, String... command) { | |
run(workingDir, List.of(command)); | |
} | |
public void run(Path workingDir, List<String> command) { | |
try { | |
run(pe -> | |
pe.directory(workingDir != null ? workingDir.toFile() : null) | |
.command(command)); | |
} catch (NonZeroExitCodeRuntimeException e) { | |
throw new NonZeroExitCodeRuntimeException(e.exitCode, command); | |
} | |
} | |
public void run(Consumer<ProcessExecutor> processExecutorConfigurator) { | |
final var exitCode = exitCode(processExecutorConfigurator); | |
if (exitCode != 0) { | |
throw new NonZeroExitCodeRuntimeException(exitCode); | |
} | |
} | |
public int exitCode(Path workingDir, String... command) { | |
return exitCode(workingDir, List.of(command)); | |
} | |
public int exitCode(Path workingDir, List<String> command) { | |
return exitCode(pe -> | |
pe.directory(workingDir != null ? workingDir.toFile() : null) | |
.command(command)); | |
} | |
public int exitCode(Consumer<ProcessExecutor> processExecutorConfigurator) { | |
final var executor = new ProcessExecutor(); | |
processExecutorConfigurator.accept(executor); | |
return invoke(() -> | |
executor.redirectOutput(loggerOutputStream()) | |
.execute() | |
.getExitValue()); | |
} | |
public String output(Path workingDir, String... command) { | |
return output(workingDir, List.of(command)); | |
} | |
public String output(Path workingDir, List<String> command) { | |
return output(pe -> | |
pe.directory(workingDir != null ? workingDir.toFile() : null) | |
.command(command)); | |
} | |
public String output(Consumer<ProcessExecutor> processExecutorConfigurator) { | |
final var executor = new ProcessExecutor(); | |
processExecutorConfigurator.accept(executor); | |
return invoke(() -> | |
executor.readOutput(true) | |
.execute() | |
.outputUTF8()); | |
} | |
private <V> V invoke(Callable<V> job) { | |
try { | |
return job.call(); | |
} catch (Exception e) { | |
logger.errorf(e, e.getMessage()); | |
throw new RuntimeException(e.getMessage(), e); | |
} | |
} | |
private OutputStream loggerOutputStream() { | |
return new JBossLogOutputStream(logger, Level.DEBUG); | |
} | |
} | |
/**
 * Thrown when an external command finishes with a non-zero exit code.
 * Carries the exit code and (optionally) the command line that failed.
 */
class NonZeroExitCodeRuntimeException extends RuntimeException {

    final int exitCode;
    final List<String> command; // null when the command line is unknown

    /** Failure without a known command line. */
    public NonZeroExitCodeRuntimeException(int exitCode) {
        this(exitCode, null);
    }

    /** Failure with the full command line for a more descriptive message. */
    public NonZeroExitCodeRuntimeException(int exitCode, List<String> command) {
        super(String.format("command failed. exit code: %d. command line: %s", exitCode, command));
        this.exitCode = exitCode;
        this.command = command;
    }
}
/**
 * Bridge for APIs that require an OutputStream, to send output to JBoss logger
 *
 * Each complete line written to the stream is forwarded to the wrapped
 * logger at the configured level.
 */
class JBossLogOutputStream extends LogOutputStream {

    final BasicLogger logger;
    final Level level;

    JBossLogOutputStream(BasicLogger logger, Level level) {
        this.logger = logger;
        this.level = level;
    }

    @Override
    protected void processLine(String line) {
        // invoked by LogOutputStream once per buffered line
        logger.log(level, line);
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment