featurePacks, ProvisioningManag
private void provisionServer(Path home, Path outputProvisioningFile, Path workDir) throws ProvisioningException,
MojoExecutionException, IOException, XMLStreamException {
- try (ProvisioningManager pm = ProvisioningManager.builder().addArtifactResolver(artifactResolver)
+ GalleonBuilder provider = new GalleonBuilder();
+ provider.addArtifactResolver(artifactResolver);
+
+ // Prior to building the config, subclasses may have to inject content into the
+ // config according to the provisioned FPs.
+ normalizeFeaturePackList();
+ ConfigId defaultConfig = willProvision(featurePacks, provider);
+ GalleonProvisioningConfig config = buildGalleonConfig(provider, defaultConfig);
+ try (Provisioning pm = provider.newProvisioningBuilder(config)
.setInstallationHome(home)
.setMessageWriter(new MvnMessageWriter(getLog()))
.setLogTime(logTime)
.setRecordState(recordState)
.build()) {
-
- // Prior to build the config, sub classes could have to inject content to the config according to the
- // provisioned FP.
- normalizeFeaturePackList();
- ConfigId defaultConfig = willProvision(featurePacks, pm);
- ProvisioningConfig config = buildGalleonConfig(pm, defaultConfig).buildConfig();
IoUtils.recursiveDelete(home);
getLog().info("Building server based on " + config.getFeaturePackDeps() + " galleon feature-packs");
MavenUpgrade mavenUpgrade = null;
+ GalleonProvisioningConfig newConfig;
if (isChannelsProvisioning()) {
if (!overriddenServerArtifacts.isEmpty()) {
throw new MojoExecutionException("overridden-server-artifacts can't be configured when channels are configured.");
}
+ newConfig = config;
} else {
- mavenUpgrade = new MavenUpgrade(this, config, pm);
+ mavenUpgrade = new MavenUpgrade(this, pm, config);
// Dump artifacts
if (dumpOriginalArtifacts) {
Path file = workDir.resolve("bootable-jar-server-original-artifacts.xml");
getLog().info("Dumping original Maven artifacts in " + file);
mavenUpgrade.dumpArtifacts(file);
}
- config = mavenUpgrade.upgrade();
+ newConfig = mavenUpgrade.upgrade();
}
// store provisioning.xml
- try(FileWriter writer = new FileWriter(outputProvisioningFile.toFile())) {
- ProvisioningXmlWriter.getInstance().write(config, writer);
- }
+ pm.storeProvisioningConfig(newConfig, outputProvisioningFile);
try {
- MavenUpgrade fmavenUpgrade = mavenUpgrade;
- scannedArtifacts = BootableJarSupport.scanArtifacts(pm, config, new ArtifactLog() {
- @Override
- public void info(FPID fpid, MavenArtifact a) {
- getLog().info("Found artifact " + a + " in " + (fmavenUpgrade == null ? fpid : fmavenUpgrade.getMavenFeaturePack(fpid)));
- }
-
- @Override
- public void debug(FPID fpid, MavenArtifact a) {
- AbstractBuildBootableJarMojo.this.debug("Found patching artifact %s in %s", a, (fmavenUpgrade == null ? fpid : fmavenUpgrade.getMavenFeaturePack(fpid)));
- }
- });
+ scannedArtifacts = BootableJarSupport.scanArtifacts(pm, newConfig, new MvnMessageWriter(getLog()));
} catch (Exception ex) {
throw new MojoExecutionException(ex);
}
- PluginProgressTracker.initTrackers(pm, getLog());
- ProvisioningRuntime rt = pm.getRuntime(config);
- pm.provision(rt.getLayout());
+ PluginProgressTracker.initTrackers(pm, new MavenJBossLogger(getLog()));
+ pm.provision(newConfig);
if (!recordState) {
Path file = home.resolve(PLUGIN_PROVISIONING_FILE);
- try (FileWriter writer = new FileWriter(file.toFile())) {
- ProvisioningXmlWriter.getInstance().write(config, writer);
- }
+ pm.storeProvisioningConfig(newConfig, file);
}
-
}
}
@@ -1233,7 +1223,7 @@ private void buildJar(Path contentDir, Path jarFile) throws MojoExecutionExcepti
}
ZipUtils.unzip(rtJarFile, contentDir);
updateManifest(contentDir);
- BootableJarSupport.zip(contentDir, jarFile);
+ ZipUtils.zip(contentDir, jarFile);
}
private void updateManifest(Path target) throws IOException {
@@ -1338,7 +1328,7 @@ public FileVisitResult postVisitDirectory(Path dir, IOException e)
}
}
- Path resolveMaven(ArtifactCoordinate coordinate) throws MavenUniverseException {
+ Path resolveMaven(GalleonArtifactCoordinate coordinate) throws MavenUniverseException {
final MavenArtifact artifact = new MavenArtifact()
.setGroupId(coordinate.getGroupId())
.setArtifactId(coordinate.getArtifactId())
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfiguration.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfiguration.java
deleted file mode 100644
index e6df4522..00000000
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfiguration.java
+++ /dev/null
@@ -1,1089 +0,0 @@
-/*
- * JBoss, Home of Professional Open Source.
- *
- * Copyright 2020 Red Hat, Inc., and individual contributors
- * as indicated by the @author tags.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.wildfly.plugins.bootablejar.maven.goals;
-
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-import javax.inject.Named;
-
-import org.jboss.as.controller.client.ModelControllerClient;
-import org.jboss.as.controller.client.helpers.ClientConstants;
-import org.jboss.as.controller.client.helpers.Operations;
-import org.jboss.dmr.ModelNode;
-import org.jboss.dmr.ModelType;
-import org.jboss.dmr.Property;
-
-/**
- * Generates a new {@code logging.properties} file based on the logging subsystem model.
- *
- *
- * This should be considered a hack which generates a {@code logging.properties} file. The generated file will not
- * necessarily be identical to that of which WildFly generates. Expressions will be written to the generated file. For
- * this reason a new file will be generated which the entry point needs to load as system properties before the log
- * manager is configured.
- *
- *
- *
- * Also handlers, formatters and filters considered explicit will not be configured at boot. As they are not used by
- * another resource this should not be an issue. Once the logging subsystems runtime phase is executed these resources
- * will be initialized.
- *
- *
- *
- * The generated file cannot support log4j appenders created as custom-handlers. Boot errors will
- * occur if this happens.
- *
- *
- * @author James R. Perkins
- */
-@Named
-// @TODO, we can't use AbstractLogEnabled, it is not in the maven plugin classloader.
-public class BootLoggingConfiguration { //extends AbstractLogEnabled {
-
- private static final Pattern SIZE_PATTERN = Pattern.compile("(\\d+)([kKmMgGbBtT])?");
- private static final String NEW_LINE = System.lineSeparator();
-
- private static final Collection IGNORED_PROPERTIES = Arrays.asList(
- "java.ext.dirs",
- "java.home",
- "jboss.home.dir",
- "java.io.tmpdir",
- "jboss.controller.temp.dir",
- "jboss.server.base.dir",
- "jboss.server.config.dir",
- "jboss.server.data.dir",
- "jboss.server.default.config",
- "jboss.server.deploy.dir",
- "jboss.server.log.dir",
- "jboss.server.persist.config",
- "jboss.server.management.uuid",
- "jboss.server.temp.dir",
- "modules.path",
- "org.jboss.server.bootstrap.maxThreads",
- "user.dir",
- "user.home"
- );
- private static final String KEY_OVERRIDES = "keyOverrides";
- private final Map properties;
- private final Map usedProperties;
- private final Map additionalPatternFormatters;
- private ModelControllerClient client;
-
- public BootLoggingConfiguration() {
- properties = new HashMap<>();
- usedProperties = new TreeMap<>();
- additionalPatternFormatters = new LinkedHashMap<>();
- }
-
- public void generate(final Path configDir, final ModelControllerClient client) throws Exception {
- properties.clear();
- usedProperties.clear();
- additionalPatternFormatters.clear();
- // First we need to determine if there is a logging subsystem, if not we don't need to handle rewriting the
- // configuration.
- ModelNode op = Operations.createOperation("read-children-names");
- op.get(ClientConstants.CHILD_TYPE).set("subsystem");
- ModelNode result = client.execute(op);
- if (!Operations.isSuccessfulOutcome(result)) {
- throw new Exception("Could not determine if the logging subsystem was present: "
- + Operations.getFailureDescription(result).asString());
- } else {
- if (Operations.readResult(result)
- .asList()
- .stream()
- .noneMatch((name) -> name.asString().equals("logging"))) {
- return;
- }
- }
-
- // Create the operations to read the resources required
- final Operations.CompositeOperationBuilder builder = Operations.CompositeOperationBuilder.create()
- .addStep(Operations.createReadResourceOperation(Operations.createAddress("subsystem", "logging"), true));
- op = Operations.createOperation("read-children-resources");
- op.get(ClientConstants.CHILD_TYPE).set("system-property");
- builder.addStep(op);
- op = Operations.createOperation("read-children-resources");
- op.get(ClientConstants.CHILD_TYPE).set("path");
- builder.addStep(op);
-
- result = client.execute(builder.build());
- if (!Operations.isSuccessfulOutcome(result)) {
- throw new Exception("Failed to determine the logging configuration: "
- + Operations.getFailureDescription(result).asString());
- }
- result = Operations.readResult(result);
- // step-1 is the subsystem, step-2 is the system properties and step-3 is the paths
- final ModelNode subsystem = Operations.readResult(result.get("step-1"));
- final ModelNode systemProperties = Operations.readResult(result.get("step-2"));
- final ModelNode paths = Operations.readResult(result.get("step-3"));
-
- // This shouldn't happen, but let's be safe
- if (subsystem.isDefined()) {
- // Sets the client to use
- this.client = client;
- parseProperties(systemProperties);
- try (BufferedWriter writer = Files.newBufferedWriter(configDir.resolve("logging.properties"), StandardCharsets.UTF_8)) {
- writer.write("# Note this file has been generated and will be overwritten if a");
- writer.write(NEW_LINE);
- writer.write("# logging subsystem has been defined in the XML configuration.");
- writer.write(NEW_LINE);
- writer.write(NEW_LINE);
-
- writeLoggers(writer, subsystem);
- writeHandlers(writer, subsystem, paths);
- // Note the formatters MUST be written after the handlers. Handlers have a legacy "formatter" attribute and
- // additional pattern-formatters may need be written.
- writeFormatters(writer, subsystem);
- writeFilters(writer, subsystem);
- } catch (IOException e) {
- throw new Exception("Failed to write the logging configuration file to " + configDir.toAbsolutePath(), e);
- }
-
- // Collect the properties we need at boot
- final Properties requiredProperties = new Properties();
- final Iterator> iter = usedProperties.entrySet().iterator();
- while (iter.hasNext()) {
- final Map.Entry entry = iter.next();
- final String key = entry.getKey();
- if (properties.containsKey(key)) {
- requiredProperties.put(key, properties.get(key));
- } else {
- // @TODO, we can't use AbstractLogEnabled, it is not in the maven plugin classloader.
- // getLogger().warn(String.format("The value for the expression \"%s\" could not be resolved " +
- // "and may not be set at boot if no default value is available.", entry.getValue()));
- System.err.println(String.format("The value for the expression \"%s\" could not be resolved "
- + "and may not be set at boot if no default value is available.", entry.getValue()));
- }
- iter.remove();
- }
-
- if (!requiredProperties.isEmpty()) {
- // Note the hard-coded "boot-config.properties", the bootable JAR entry point will look for this file
- // and process it if it exists.
- try (BufferedWriter writer = Files.newBufferedWriter(configDir.resolve("boot-config.properties"))) {
- requiredProperties.store(writer, "Bootable JAR boot properties required by the log manager.");
- } catch (IOException e) {
- throw new Exception("Failed to write the system properties required by the logging configuration file to "
- + configDir.toAbsolutePath(), e);
- }
- }
- }
- }
-
- private void writeFilters(final Writer writer, final ModelNode subsystem) throws IOException {
- if (subsystem.hasDefined("filter")) {
- for (Property property : subsystem.get("filter").asPropertyList()) {
- final String name = property.getName();
- final ModelNode model = property.getValue();
- final String prefix = "filter." + name;
- writeProperty(writer, prefix, null, resolveAsString(model.get("class")));
- writeProperty(writer, prefix, "module", resolveAsString(model.get("module")));
-
- final ModelNode allProperties = new ModelNode();
-
- if (model.hasDefined("constructor-properties")) {
- final ModelNode properties = model.get("constructor-properties");
- final Collection constructorNames = properties.asPropertyList()
- .stream()
- .map(Property::getName)
- .collect(Collectors.toList());
- writeProperty(writer, prefix, "constructorProperties", toCsvString(constructorNames));
- for (String n : constructorNames) {
- allProperties.get(n).set(properties.get(n));
- }
- }
- if (model.hasDefined("properties")) {
- final ModelNode properties = model.get("properties");
- final Collection propertyNames = properties.asPropertyList()
- .stream()
- .map(Property::getName)
- .collect(Collectors.toList());
- for (String n : propertyNames) {
- allProperties.get(n).set(properties.get(n));
- }
- }
- if (allProperties.isDefined()) {
- writeProperty(writer, prefix, "properties", toCsvString(allProperties.asPropertyList()
- .stream()
- .map(Property::getName)
- .collect(Collectors.toList())
- ));
- writeProperties(writer, prefix, allProperties);
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeFormatters(final Writer writer, final ModelNode subsystem) throws IOException {
- // Formatters
- if (subsystem.hasDefined("custom-formatter")) {
- writeCustomFormatter(writer, subsystem.get("custom-formatter").asPropertyList());
- }
- if (subsystem.hasDefined("json-formatter")) {
- writeStructuredFormatter("org.jboss.logmanager.formatters.JsonFormatter", writer, subsystem.get("json-formatter").asPropertyList());
- }
- if (subsystem.hasDefined("pattern-formatter")) {
- writePatternFormatter(writer, subsystem.get("pattern-formatter").asPropertyList());
- }
- if (subsystem.hasDefined("xml-formatter")) {
- writeStructuredFormatter("org.jboss.logmanager.formatters.XmlFormatter", writer, subsystem.get("xml-formatter").asPropertyList());
- }
- }
-
- private void writeCustomFormatter(final Writer writer, final List formatters) throws IOException {
- for (Property property : formatters) {
- final String name = property.getName();
- final ModelNode model = property.getValue().clone();
- final String prefix = "formatter." + name;
- writeProperty(writer, prefix, null, resolveAsString(model.remove("class")));
- writeProperty(writer, prefix, "module", resolveAsString(model.remove("module")));
- if (model.hasDefined("properties")) {
- final ModelNode properties = model.get("properties");
- // Next we need to write the properties
- final Collection definedPropertyNames = properties.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames));
- // Write the property values
- for (String attributeName : definedPropertyNames) {
- writeProperty(writer, prefix, attributeName, properties.get(attributeName));
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writePatternFormatter(final Writer writer, final List formatters) throws IOException {
- for (Property property : formatters) {
- final String name = property.getName();
- final ModelNode model = property.getValue().clone();
- final String prefix = "formatter." + name;
- writeProperty(writer, prefix, null, "org.jboss.logmanager.formatters.PatternFormatter");
-
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList())
- ));
- // Write the property values
- for (String attributeName : definedPropertyNames) {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), model.get(attributeName));
- }
- writer.write(NEW_LINE);
- }
-
- // Write any additional pattern-formatters that were defined on a handlers "formatter" attribute
- final Iterator> iter = additionalPatternFormatters.entrySet().iterator();
- while (iter.hasNext()) {
- final Map.Entry entry = iter.next();
- final String prefix = "formatter." + entry.getKey();
- writeProperty(writer, prefix, null, "org.jboss.logmanager.formatters.PatternFormatter");
- writeProperty(writer, prefix, "constructorProperties", "pattern");
- writeProperty(writer, prefix, "properties", "pattern");
- writeProperty(writer, prefix, "pattern", entry.getValue());
- writer.write(NEW_LINE);
- iter.remove();
- }
- }
-
- private void writeStructuredFormatter(final String type, final Writer writer,
- final List formatters) throws IOException {
- for (Property property : formatters) {
- final String name = property.getName();
- final ModelNode model = property.getValue().clone();
- final String prefix = "formatter." + name;
- writeProperty(writer, prefix, null, type);
- boolean needKeyOverrides = !model.hasDefined("key-overrides");
- // The key-overrides are used as constructor parameters
- // This property is alwasy added.
- writeProperty(writer, prefix, "constructorProperties", KEY_OVERRIDES);
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- if (needKeyOverrides) {
- definedPropertyNames.add(KEY_OVERRIDES);
- }
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList())
- ));
- // Write the property values
- for (String attributeName : definedPropertyNames) {
- final ModelNode value = model.get(attributeName);
- // Handle special cases
- if ("exception-output-type".equals(attributeName)) {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), toEnumString(model.get(attributeName)));
- } else {
- if (needKeyOverrides && KEY_OVERRIDES.equals(attributeName)) {
- // The value is empty if explicitely added.
- writeProperty(writer, prefix, resolvePropertyName(attributeName), "");
- } else {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), value);
- }
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeHandlers(final Writer writer, final ModelNode subsystem, final ModelNode pathModel) throws IOException {
- if (subsystem.hasDefined("async-handler")) {
- writeAsyncHandlers(writer, subsystem.get("async-handler").asPropertyList());
- }
-
- if (subsystem.hasDefined("console-handler")) {
- writeConsoleHandlers(writer, subsystem.get("console-handler").asPropertyList());
- }
- if (subsystem.hasDefined("custom-handler")) {
- writeCustomHandlers(writer, subsystem.get("custom-handler").asPropertyList());
- }
- if (subsystem.hasDefined("file-handler")) {
- writeFileHandlers(pathModel, "org.jboss.logmanager.handlers.FileHandler", writer, subsystem.get("file-handler").asPropertyList());
- }
- if (subsystem.hasDefined("periodic-rotating-file-handler")) {
- writeFileHandlers(pathModel, "org.jboss.logmanager.handlers.PeriodicRotatingFileHandler", writer, subsystem.get("periodic-rotating-file-handler").asPropertyList());
- }
- if (subsystem.hasDefined("periodic-size-rotating-file-handler")) {
- writeFileHandlers(pathModel, "org.jboss.logmanager.handlers.PeriodicSizeRotatingFileHandler", writer, subsystem.get("periodic-size-rotating-file-handler").asPropertyList());
- }
- if (subsystem.hasDefined("size-rotating-file-handler")) {
- writeFileHandlers(pathModel, "org.jboss.logmanager.handlers.SizeRotatingFileHandler", writer, subsystem.get("size-rotating-file-handler").asPropertyList());
- }
- if (subsystem.hasDefined("socket-handler")) {
- writeSocketHandler(writer, subsystem.get("socket-handler").asPropertyList());
- }
- if (subsystem.hasDefined("syslog-handler")) {
- writeSyslogHandler(writer, subsystem.get("syslog-handler").asPropertyList());
- }
- }
-
- private void writeAsyncHandlers(final Writer writer, final List handlers) throws IOException {
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
- writeCommonHandler("org.jboss.logmanager.handlers.AsyncHandler", writer, name, prefix, model);
- final ModelNode subhandlers = model.remove("subhandlers");
- if (isDefined(subhandlers)) {
- writeProperty(writer, prefix, "handlers", subhandlers);
- }
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- definedPropertyNames.add("closeChildren");
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList())
- ));
- // Write the constructor properties
- writeProperty(writer, prefix, "constructorProperties", "queueLength");
- // Write the property values
- for (String attributeName : definedPropertyNames) {
- if ("closeChildren".equals(attributeName)) {
- writeProperty(writer, prefix, attributeName, "false");
- } else {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), model.get(attributeName));
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeConsoleHandlers(final Writer writer, final List handlers) throws IOException {
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
- writeCommonHandler("org.jboss.logmanager.handlers.ConsoleHandler", writer, name, prefix, model);
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList())
- ));
- // Write the property values
- for (String attributeName : definedPropertyNames) {
- if ("target".equals(attributeName)) {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), toEnumString(model.get(attributeName)));
- } else {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), model.get(attributeName));
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeCustomHandlers(final Writer writer, final List handlers) throws IOException {
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
- writeCommonHandler(null, writer, name, prefix, model);
- // Next we need to write the properties
- if (model.hasDefined("properties")) {
- final Collection definedPropertyNames = model.get("properties").asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- if (model.hasDefined("enabled")) {
- definedPropertyNames.add("enabled");
- }
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames));
- final ModelNode properties = model.get("properties");
- for (String attributeName : definedPropertyNames) {
- if ("enabled".equals(attributeName)) {
- if (model.hasDefined(attributeName)) {
- writeProperty(writer, prefix, attributeName, model.get(attributeName));
- }
- } else {
- writeProperty(writer, prefix, attributeName, properties.get(attributeName));
- }
- }
- } else {
- if (model.hasDefined("enabled")) {
- writeProperty(writer, prefix, "properties", "enabled");
- writeProperty(writer, prefix, "enabled", model.get("enabled"));
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeFileHandlers(final ModelNode pathModel, final String type, final Writer writer,
- final List handlers) throws IOException {
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
-
- final ModelNode file = model.remove("file");
- // If the file is not defined, which shouldn't happen, we'll just skip this one
- if (!isDefined(file)) {
- continue;
- }
-
- writeCommonHandler(type, writer, name, prefix, model);
-
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- final Collection propertyNames = definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList());
- propertyNames.add("fileName");
- writeProperty(writer, prefix, "properties", toCsvString(propertyNames));
-
- // Write the constructor properties
- writeProperty(writer, prefix, "constructorProperties", "fileName,append");
-
- // Write the remainder of the properties
- for (String attributeName : definedPropertyNames) {
- // The rotate-size requires special conversion
- if ("rotate-size".equals(attributeName)) {
- final String resolvedValue = String.valueOf(parseSize(model.get(attributeName)));
- writeProperty(writer, prefix, resolvePropertyName(attributeName), resolvedValue);
- } else {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), model.get(attributeName));
- }
- }
-
- // Write the fileName
- final StringBuilder result = new StringBuilder();
- if (file.hasDefined("relative-to")) {
- final String relativeTo = file.get("relative-to").asString();
- resolveRelativeTo(pathModel, relativeTo, result);
- }
- if (file.hasDefined("path")) {
- result.append(resolveAsString(file.get("path")));
- }
- writeProperty(writer, prefix, "fileName", result.toString());
- writer.write(NEW_LINE);
- }
- }
-
- private void writeSocketHandler(final Writer writer, final List handlers) throws IOException {
- // Socket handlers are actually configured late initialized defined as a DelayedHandler
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
- writeCommonHandler("org.jboss.logmanager.handlers.DelayedHandler", writer, name, prefix, model);
- if (model.hasDefined("enabled")) {
- writeProperty(writer, prefix, "properties", "enabled");
- writeProperty(writer, prefix, "enabled", model.get("enabled"));
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeSyslogHandler(final Writer writer, final List handlers) throws IOException {
- // Socket handlers are actually configured late initialized defined as a DelayedHandler
- for (Property property : handlers) {
- final String name = property.getName();
- final String prefix = "handler." + name;
- final ModelNode model = property.getValue().clone();
- writeCommonHandler("org.jboss.logmanager.handlers.SyslogHandler", writer, name, prefix, model);
-
- // Next we need to write the properties
- final Collection definedPropertyNames = model.asPropertyList()
- .stream()
- .filter((p) -> p.getValue().isDefined())
- .map(Property::getName)
- .collect(Collectors.toList());
- writeProperty(writer, prefix, "properties", toCsvString(definedPropertyNames
- .stream()
- .map(BootLoggingConfiguration::resolvePropertyName)
- .collect(Collectors.toList())
- ));
- for (String attributeName : definedPropertyNames) {
- if ("facility".equals(attributeName)) {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), toEnumString(model.get(attributeName)));
- } else {
- writeProperty(writer, prefix, resolvePropertyName(attributeName), model.get(attributeName));
- }
- }
- writer.write(NEW_LINE);
- }
- }
-
- private void writeCommonHandler(final String type, final Writer writer, final String name,
- final String prefix, final ModelNode model) throws IOException {
- if (type == null) {
- writeProperty(writer, prefix, null, resolveAsString(model.remove("class")));
- writeProperty(writer, prefix, "module", resolveAsString(model.remove("module")));
- } else {
- writeProperty(writer, prefix, null, type);
- }
-
- // Remove the legacy "name" attribute
- model.remove("name");
-
- // Write the level
- final ModelNode level = model.remove("level");
- if (isDefined(level)) {
- writeProperty(writer, prefix, "level", level);
- }
- final ModelNode encoding = model.remove("encoding");
- if (isDefined(encoding)) {
- writeProperty(writer, prefix, "encoding", encoding);
- }
-
- final ModelNode namedFormatter = model.remove("named-formatter");
- final ModelNode formatter = model.remove("formatter");
- if (isDefined(namedFormatter)) {
- writeProperty(writer, prefix, "formatter", namedFormatter.asString());
- } else if (isDefined(formatter)) {
- // We need to add a formatter with the known name used in WildFly
- final String defaultFormatterName = name + "-wfcore-pattern-formatter";
- additionalPatternFormatters.put(defaultFormatterName, resolveAsString(formatter));
- writeProperty(writer, prefix, "formatter", defaultFormatterName);
- }
- // Write the filter spec and remove the filter attribute which we will not use
- model.remove("filter");
- final ModelNode filter = model.remove("filter-spec");
- if (isDefined(filter)) {
- writeProperty(writer, prefix, "filter", filter);
- }
- }
-
- private void writeLoggers(final Writer writer, final ModelNode model) throws IOException {
- if (model.hasDefined("logger")) {
- final List loggerModel = model.get("logger").asPropertyList();
- writer.write("# Additional loggers to configure (the root logger is always configured)");
- writer.write(NEW_LINE);
- // First we need to list the loggers to define
- writeProperty(writer, "loggers", null, toCsvString(loggerModel
- .stream()
- .map(Property::getName)
- .collect(Collectors.toList())
- ));
- writer.write(NEW_LINE);
- // Next get the root logger
- if (model.hasDefined("root-logger", "ROOT")) {
- writeLogger(writer, null, model.get("root-logger", "ROOT"));
- }
-
- for (Property property : loggerModel) {
- writeLogger(writer, property.getName(), property.getValue());
- }
- }
- }
-
- private void writeLogger(final Writer writer, final String name, final ModelNode model) throws IOException {
- final String prefix = name == null ? "logger" : "logger." + name;
- if (model.hasDefined("filter-spec")) {
- writeProperty(writer, prefix, "filter", model.get("filter-spec"));
- }
- if (model.hasDefined("handlers")) {
- writeProperty(writer, prefix, "handlers", toCsvString(model.get("handlers").asList()
- .stream()
- .map(ModelNode::asString)
- .collect(Collectors.toList())));
- }
- if (model.hasDefined("level")) {
- writeProperty(writer, prefix, "level", model.get("level"));
- }
- if (model.hasDefined("use-parent-filters")) {
- writeProperty(writer, prefix, "useParentFilters", model.get("use-parent-filters"));
- }
- if (model.hasDefined("use-parent-handlers")) {
- writeProperty(writer, prefix, "useParentHandlers", model.get("use-parent-handlers"));
- }
- writer.write(NEW_LINE);
- }
-
- private void writeProperties(final Writer writer, final String prefix, final ModelNode model) throws IOException {
- for (Property property : model.asPropertyList()) {
- final String name = property.getName();
- final ModelNode value = property.getValue();
- if (value.isDefined()) {
- writeProperty(writer, prefix, name, value);
- }
- }
- }
-
- private void writeProperty(final Writer out, final String prefix, final String name, final ModelNode value) throws IOException {
- writeProperty(out, prefix, name, resolveAsString(value));
- }
-
- private String toEnumString(final ModelNode value) {
- final StringBuilder result = new StringBuilder();
- if (value.getType() == ModelType.EXPRESSION) {
- final Collection expressions = Expression.parse(value.asExpression());
- for (Expression expression : expressions) {
- addUsedProperties(expression, value.asString());
- result.append("${");
- final Iterator iter = expression.getKeys().iterator();
- while (iter.hasNext()) {
- result.append(iter.next());
- if (iter.hasNext()) {
- result.append(',');
- }
- }
- if (expression.hasDefault()) {
- result.append(':');
- final String dft = expression.getDefaultValue();
- for (char c : dft.toCharArray()) {
- if (c == '-' || c == '.') {
- result.append('_');
- } else {
- result.append(Character.toUpperCase(c));
- }
- }
- }
- result.append('}');
- }
- } else {
- for (char c : value.asString().toCharArray()) {
- if (c == '-' || c == '.') {
- result.append('_');
- } else {
- result.append(Character.toUpperCase(c));
- }
- }
- }
- return result.toString();
- }
-
- private String resolveAsString(final ModelNode value) {
- if (value.getType() == ModelType.LIST) {
- return toCsvString(value.asList()
- .stream()
- .map(ModelNode::asString)
- .collect(Collectors.toList())
- );
- } else if (value.getType() == ModelType.OBJECT) {
- return modelToMap(value);
- } else {
- if (value.getType() == ModelType.EXPRESSION) {
- final Collection expressions = Expression.parse(value.asExpression());
- addUsedProperties(expressions, value.asString());
- }
- return value.asString();
- }
- }
-
- private long parseSize(final ModelNode value) throws IOException {
- String stringValue;
- // This requires some special handling as we need the resolved value.
- if (value.getType() == ModelType.EXPRESSION) {
- // We need update the usedProperties
- final Collection expressions = Expression.parse(value.asExpression());
- addUsedProperties(expressions, value.asString());
- // Now we need to resolve the expression
- final ModelNode op = Operations.createOperation("resolve-expression");
- op.get("expression").set(value.asString());
- final ModelNode result = client.execute(op);
- if (!Operations.isSuccessfulOutcome(result)) {
- throw new RuntimeException(String.format("Failed to resolve the expression %s: %s", value.asString(),
- Operations.getFailureDescription(result).asString()));
- }
- stringValue = Operations.readResult(result).asString();
- } else {
- stringValue = value.asString();
- }
- final Matcher matcher = SIZE_PATTERN.matcher(stringValue);
- // This shouldn't happen, but we shouldn't fail either
- if (!matcher.matches()) {
- // by default, rotate at 10MB
- return 0xa0000L;
- }
- long qty = Long.parseLong(matcher.group(1), 10);
- final String chr = matcher.group(2);
- if (chr != null) {
- switch (chr.charAt(0)) {
- case 'b':
- case 'B':
- break;
- case 'k':
- case 'K':
- qty <<= 10L;
- break;
- case 'm':
- case 'M':
- qty <<= 20L;
- break;
- case 'g':
- case 'G':
- qty <<= 30L;
- break;
- case 't':
- case 'T':
- qty <<= 40L;
- break;
- default:
- // by default, rotate at 10MB
- return 0xa0000L;
- }
- }
- return qty;
- }
-
- private void parseProperties(final ModelNode model) {
- if (model.isDefined()) {
- for (Property property : model.asPropertyList()) {
- final String key = property.getName();
- if (IGNORED_PROPERTIES.contains(key)) {
- continue;
- }
- final ModelNode value = property.getValue().get("value");
- if (value.isDefined()) {
- properties.put(key, value.asString());
- }
- }
- }
- }
-
- private void resolveRelativeTo(final ModelNode pathModel, final String relativeTo, final StringBuilder builder) {
- if (pathModel.hasDefined(relativeTo)) {
- final ModelNode path = pathModel.get(relativeTo);
- if (path.hasDefined("relative-to")) {
- resolveRelativeTo(pathModel, path.get("relative-to").asString(), builder);
- }
- if (path.hasDefined("path")) {
- final ModelNode pathEntry = path.get("path");
- if (pathEntry.getType() == ModelType.EXPRESSION) {
- final Collection expressions = Expression.parse(pathEntry.asExpression());
- for (Expression expression : expressions) {
- for (String key : expression.getKeys()) {
- if (!properties.containsKey(key)) {
- // @TODO, we can't use AbstractLogEnabled, it is not in the maven plugin classloader.
- //getLogger().warn(String.format("The path %s is an undefined property. If not set at boot time unexpected results may occur.", pathEntry.asString()));
- System.err.println(String.format("The path %s is an undefined property. If not set at boot time unexpected results may occur.", pathEntry.asString()));
- } else {
- // We use the property name and value directly rather than referencing the path
- usedProperties.put(key, properties.get(key));
- expression.appendTo(builder);
- }
- }
- }
- } else {
- if (!IGNORED_PROPERTIES.contains(relativeTo)) {
- properties.put(relativeTo, pathEntry.asString());
- usedProperties.put(relativeTo, pathEntry.asString());
- }
- builder.append("${")
- .append(relativeTo)
- .append("}");
- }
- }
- // Use a Linux style path separator as we can't use a Windows one on Linux, but we
- // can use a Linux one on Windows.
- builder.append('/');
- }
- }
-
- private void addUsedProperties(final Collection expressions, final String value) {
- for (Expression expression : expressions) {
- addUsedProperties(expression, value);
- }
- }
-
- private void addUsedProperties(final Expression expression, final String value) {
- for (String key : expression.getKeys()) {
- usedProperties.put(key, value);
- }
- }
-
- private static void writeProperty(final Writer out, final String prefix, final String name, final String value) throws IOException {
- if (name == null) {
- writeKey(out, prefix);
- } else {
- writeKey(out, String.format("%s.%s", prefix, name));
- }
- writeValue(out, value);
- out.write(NEW_LINE);
- }
-
- private static void writeValue(final Appendable out, final String value) throws IOException {
- writeSanitized(out, value, false);
- }
-
- private static void writeKey(final Appendable out, final String key) throws IOException {
- writeSanitized(out, key, true);
- out.append('=');
- }
-
- private static void writeSanitized(final Appendable out, final String string, final boolean escapeSpaces) throws IOException {
- for (int x = 0; x < string.length(); x++) {
- final char c = string.charAt(x);
- switch (c) {
- case ' ':
- if (x == 0 || escapeSpaces)
- out.append('\\');
- out.append(c);
- break;
- case '\t':
- out.append('\\').append('t');
- break;
- case '\n':
- out.append('\\').append('n');
- break;
- case '\r':
- out.append('\\').append('r');
- break;
- case '\f':
- out.append('\\').append('f');
- break;
- case '\\':
- case '=':
- case ':':
- case '#':
- case '!':
- out.append('\\').append(c);
- break;
- default:
- out.append(c);
- }
- }
- }
-
- private static String modelToMap(final ModelNode value) {
- if (value.getType() != ModelType.OBJECT) {
- return null;
- }
- final List properties = value.asPropertyList();
- final StringBuilder result = new StringBuilder();
- final Iterator iterator = properties.iterator();
- while (iterator.hasNext()) {
- final Property property = iterator.next();
- escapeKey(result, property.getName());
- result.append('=');
- final ModelNode v = property.getValue();
- if (v.isDefined()) {
- escapeValue(result, v.asString());
- }
- if (iterator.hasNext()) {
- result.append(',');
- }
- }
- return result.toString();
- }
-
- private static boolean isDefined(final ModelNode value) {
- return value != null && value.isDefined();
- }
-
- private static String toCsvString(final Collection names) {
- final StringBuilder result = new StringBuilder(1024);
- Iterator iterator = names.iterator();
- while (iterator.hasNext()) {
- final String name = iterator.next();
- // No need to write empty names
- if (!name.isEmpty()) {
- result.append(name);
- if (iterator.hasNext()) {
- result.append(",");
- }
- }
- }
- return result.toString();
- }
-
- private static String resolvePropertyName(final String modelName) {
- if ("autoflush".equals(modelName)) {
- return "autoFlush";
- }
- if ("color-map".equals(modelName)) {
- return "colors";
- }
- if ("syslog-format".equals(modelName)) {
- return "syslogType";
- }
- if ("server-address".equals(modelName)) {
- return "serverHostname";
- }
- if (modelName.contains("-")) {
- final StringBuilder builder = new StringBuilder();
- boolean cap = false;
- for (char c : modelName.toCharArray()) {
- if (c == '-') {
- cap = true;
- continue;
- }
- if (cap) {
- builder.append(Character.toUpperCase(c));
- cap = false;
- } else {
- builder.append(c);
- }
- }
- return builder.toString();
- }
- return modelName;
- }
-
- /**
- * Escapes a maps key value for serialization to a string. If the key contains a {@code \} or an {@code =} it will
- * be escaped by a preceding {@code \}. Example: {@code key\=} or {@code \\key}.
- *
- * @param sb the string builder to append the escaped key to
- * @param key the key
- */
- private static void escapeKey(final StringBuilder sb, final String key) {
- final char[] chars = key.toCharArray();
- for (int i = 0; i < chars.length; i++) {
- final char c = chars[i];
- // Ensure that \ and = are escaped
- if (c == '\\') {
- final int n = i + 1;
- if (n >= chars.length) {
- sb.append('\\').append('\\');
- } else {
- final char next = chars[n];
- if (next == '\\' || next == '=') {
- // Nothing to do, already properly escaped
- sb.append(c);
- sb.append(next);
- i = n;
- } else {
- // Now we need to escape the \
- sb.append('\\').append('\\');
- }
- }
- } else if (c == '=') {
- sb.append('\\').append(c);
- } else {
- sb.append(c);
- }
- }
- }
-
- /**
- * Escapes a maps value for serialization to a string. If a value contains a {@code \} or a {@code ,} it will be
- * escaped by a preceding {@code \}. Example: {@code part1\,part2} or {@code value\\other}.
- *
- * @param sb the string builder to append the escaped value to
- * @param value the value
- */
- private static void escapeValue(final StringBuilder sb, final String value) {
- if (value != null) {
- final char[] chars = value.toCharArray();
- for (int i = 0; i < chars.length; i++) {
- final char c = chars[i];
- // Ensure that \ and , are escaped
- if (c == '\\') {
- final int n = i + 1;
- if (n >= chars.length) {
- sb.append('\\').append('\\');
- } else {
- final char next = chars[n];
- if (next == '\\' || next == ',') {
- // Nothing to do, already properly escaped
- sb.append(c);
- sb.append(next);
- i = n;
- } else {
- // Now we need to escape the \
- sb.append('\\').append('\\');
- }
- }
- } else if (c == ',') {
- sb.append('\\').append(c);
- } else {
- sb.append(c);
- }
- }
- }
- }
-}
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ChannelMavenArtifactRepositoryManager.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ChannelMavenArtifactRepositoryManager.java
index ec3553f7..dc127ff7 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ChannelMavenArtifactRepositoryManager.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ChannelMavenArtifactRepositoryManager.java
@@ -37,8 +37,8 @@
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.repository.RemoteRepository;
-import org.jboss.galleon.ProvisioningException;
-import org.jboss.galleon.layout.FeaturePackDescriber;
+import org.jboss.galleon.api.MavenStreamResolver;
+import org.jboss.galleon.api.Provisioning;
import org.jboss.galleon.universe.maven.MavenArtifact;
import org.jboss.galleon.universe.maven.MavenUniverseException;
import org.jboss.galleon.universe.maven.repo.MavenRepoManager;
@@ -50,6 +50,7 @@
import org.wildfly.channel.NoStreamFoundException;
import org.wildfly.channel.Repository;
import org.wildfly.channel.UnresolvedMavenArtifactException;
+import org.wildfly.channel.VersionResult;
import org.wildfly.channel.maven.VersionResolverFactory;
import static org.wildfly.channel.maven.VersionResolverFactory.DEFAULT_REPOSITORY_MAPPER;
import org.wildfly.channel.spi.ChannelResolvable;
@@ -57,7 +58,7 @@
import org.wildfly.prospero.metadata.ManifestVersionResolver;
import org.wildfly.prospero.metadata.ProsperoMetadataUtils;
-public class ChannelMavenArtifactRepositoryManager implements MavenRepoManager, ChannelResolvable {
+public class ChannelMavenArtifactRepositoryManager implements MavenRepoManager, ChannelResolvable, MavenStreamResolver {
private static final String REQUIRE_CHANNEL_FOR_ALL_ARTIFACT = "org.wildfly.plugins.galleon.all.artifact.requires.channel.resolution";
private final ChannelSession channelSession;
@@ -151,9 +152,7 @@ private boolean fpRequireChannel(MavenArtifact artifact) throws Exception {
artifact.getExtension(),
artifact.getClassifier(),
artifact.getVersion());
- try {
- FeaturePackDescriber.readSpec(mavenArtifact.getFile().toPath());
- } catch (ProvisioningException ex) {
+ if (!Provisioning.isFeaturePack(mavenArtifact.getFile().toPath())) {
// Not a feature-pack
return requireChannel;
}
@@ -244,4 +243,12 @@ public List getAllVersions(MavenArtifact artifact, Pattern includeVersio
public void install(MavenArtifact artifact, Path path) throws MavenUniverseException {
throw new MavenUniverseException("Channel resolution can't be applied to Galleon universe");
}
+
+ @Override
+ public String getLatestVersion(String groupId, String artifactId, String extension, String classifier, String baseVersion) {
+ VersionResult res = channelSession.findLatestMavenArtifactVersion(groupId, artifactId, extension, classifier,
+ baseVersion);
+ return res.getVersion();
+ }
+
}
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/DevWatchBootableJarMojo.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/DevWatchBootableJarMojo.java
index 176dc892..ea7a3813 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/DevWatchBootableJarMojo.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/DevWatchBootableJarMojo.java
@@ -98,7 +98,7 @@
import org.jboss.dmr.ModelNode;
import org.wildfly.core.launcher.BootableJarCommandBuilder;
import org.wildfly.plugin.common.PropertyNames;
-import org.wildfly.plugin.core.ServerHelper;
+import org.wildfly.plugin.tools.ServerHelper;
/**
* Build and start a bootable JAR for dev-watch mode. This goal monitors the
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/FeaturePacksUtil.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/FeaturePacksUtil.java
deleted file mode 100644
index 67eac04c..00000000
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/FeaturePacksUtil.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright 2023 Red Hat, Inc. and/or its affiliates
- * and other contributors as indicated by the @author tags.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.wildfly.plugins.bootablejar.maven.goals;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import org.jboss.galleon.ProvisioningException;
-import org.jboss.galleon.ProvisioningManager;
-import org.jboss.galleon.config.ConfigId;
-import org.jboss.galleon.config.FeaturePackConfig;
-import org.jboss.galleon.config.ProvisioningConfig;
-import org.jboss.galleon.layout.FeaturePackLayout;
-import org.jboss.galleon.layout.ProvisioningLayout;
-import org.jboss.galleon.universe.FeaturePackLocation;
-import org.wildfly.plugins.bootablejar.maven.common.FeaturePack;
-import static org.wildfly.plugins.bootablejar.maven.goals.AbstractBuildBootableJarMojo.STANDALONE;
-import static org.wildfly.plugins.bootablejar.maven.goals.AbstractBuildBootableJarMojo.STANDALONE_XML;
-
-/**
- *
- * @author jdenise
- */
-public class FeaturePacksUtil {
-
- private static final String HEALTH = "health";
- private static final String MP_HEALTH = "microprofile-health";
-
- public static class ProvisioningSpecifics {
-
- private final boolean isMicroprofile;
- private final String healthLayer;
-
- ProvisioningSpecifics(Set allLayers) {
- if (allLayers.contains(MP_HEALTH)) {
- healthLayer = MP_HEALTH;
- isMicroprofile = true;
- } else {
- if (allLayers.contains(HEALTH)) {
- healthLayer = HEALTH;
- } else {
- healthLayer = null;
- }
- isMicroprofile = false;
- }
- }
-
- ConfigId getDefaultConfig(boolean isCloud) {
- if (isCloud) {
- if (isMicroprofile) {
- return new ConfigId(STANDALONE, "standalone-microprofile-ha.xml");
- } else {
- return new ConfigId(STANDALONE, "standalone-ha.xml");
- }
- } else {
- if (isMicroprofile) {
- return new ConfigId(STANDALONE, "standalone-microprofile.xml");
- } else {
- return new ConfigId(STANDALONE, STANDALONE_XML);
- }
- }
- }
-
- String getHealthLayer() {
- return healthLayer;
- }
- }
-
- static ProvisioningSpecifics getSpecifics(List fps, ProvisioningManager pm) throws ProvisioningException, IOException {
- return new ProvisioningSpecifics(getAllLayers(fps, pm));
- }
-
- private static Set getAllLayers(List fps, ProvisioningManager pm) throws ProvisioningException, IOException {
- Set allLayers = new HashSet<>();
- for (FeaturePack fp : fps) {
- final FeaturePackLocation fpl;
- if (fp.getNormalizedPath() != null) {
- fpl = pm.getLayoutFactory().addLocal(fp.getNormalizedPath(), false);
- } else if (fp.getGroupId() != null && fp.getArtifactId() != null) {
- String coords = fp.getMavenCoords();
- fpl = FeaturePackLocation.fromString(coords);
- } else {
- fpl = FeaturePackLocation.fromString(fp.getLocation());
- }
- ProvisioningConfig pConfig = ProvisioningConfig.builder().
- addFeaturePackDep(FeaturePackConfig.builder(fpl).build()).build();
- try (ProvisioningLayout layout = pm.
- getLayoutFactory().newConfigLayout(pConfig)) {
- allLayers.addAll(getAllLayers(layout));
- }
- }
- return allLayers;
- }
-
- private static Set getAllLayers(ProvisioningLayout pLayout)
- throws ProvisioningException, IOException {
- Set layers = new HashSet<>();
- for (FeaturePackLayout fp : pLayout.getOrderedFeaturePacks()) {
- for (ConfigId layer : fp.loadLayers()) {
- layers.add(layer.getName());
- }
- }
- return layers;
- }
-}
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/MavenUpgrade.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/MavenUpgrade.java
index fb72d50f..29096c49 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/MavenUpgrade.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/MavenUpgrade.java
@@ -30,13 +30,13 @@
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.jboss.galleon.ProvisioningDescriptionException;
import org.jboss.galleon.ProvisioningException;
-import org.jboss.galleon.ProvisioningManager;
-import org.jboss.galleon.config.FeaturePackConfig;
-import org.jboss.galleon.config.ProvisioningConfig;
-import org.jboss.galleon.layout.FeaturePackDescriber;
-import org.jboss.galleon.spec.FeaturePackSpec;
+import org.jboss.galleon.api.GalleonFeaturePackDescription;
+import org.jboss.galleon.api.Provisioning;
+import org.jboss.galleon.api.config.GalleonFeaturePackConfig;
+import org.jboss.galleon.api.config.GalleonProvisioningConfig;
import org.jboss.galleon.universe.Channel;
import org.jboss.galleon.universe.FeaturePackLocation;
+import org.jboss.galleon.universe.FeaturePackLocation.FPID;
import org.jboss.galleon.universe.FeaturePackLocation.ProducerSpec;
import org.jboss.galleon.universe.maven.MavenChannel;
import org.jboss.galleon.universe.maven.MavenUniverseException;
@@ -48,18 +48,17 @@ final class MavenUpgrade {
private final Map dependencies = new LinkedHashMap<>();
private final Map topLevels = new LinkedHashMap<>();
private final AbstractBuildBootableJarMojo mojo;
- private final ProvisioningConfig config;
private final Map producerToGAC = new HashMap<>();
- private final ProvisioningManager pm;
private ScannedModules modules;
-
- MavenUpgrade(AbstractBuildBootableJarMojo mojo, ProvisioningConfig config, ProvisioningManager pm)
+ private final GalleonProvisioningConfig originalConfig;
+ private final Provisioning provisioning;
+ MavenUpgrade(AbstractBuildBootableJarMojo mojo, Provisioning provisioning, GalleonProvisioningConfig originalConfig)
throws MavenUniverseException, ProvisioningException, MojoExecutionException {
this.mojo = mojo;
- this.config = config;
- this.pm = pm;
- for (FeaturePackConfig cfg : config.getFeaturePackDeps()) {
- FeaturePack fp = toFeaturePack(cfg, pm);
+ this.provisioning = provisioning;
+ this.originalConfig = originalConfig;
+ for (GalleonFeaturePackConfig cfg : originalConfig.getFeaturePackDeps()) {
+ FeaturePack fp = toFeaturePack(cfg.getLocation());
if (fp == null) {
throw new ProvisioningException("Invalid location " + cfg.getLocation());
}
@@ -74,25 +73,25 @@ final class MavenUpgrade {
mojo.debug("Top level feature-packs: %s", topLevels);
mojo.debug("Resolved feature-packs: %s", resolvedFeaturePacks);
for (Entry entry : resolvedFeaturePacks.entrySet()) {
- FeaturePackSpec spec = FeaturePackDescriber.readSpec(entry.getValue());
- producerToGAC.put(spec.getFPID().getProducer(), entry.getKey());
- List allDeps = new ArrayList<>();
- for (FeaturePackConfig cfg : spec.getFeaturePackDeps()) {
+ GalleonFeaturePackDescription spec = Provisioning.getFeaturePackDescription(entry.getValue());
+ producerToGAC.put(spec.getProducer().getProducer(), entry.getKey());
+ List allDeps = new ArrayList<>();
+ for (FPID cfg : spec.getDependencies()) {
allDeps.add(cfg);
}
- for (FeaturePackConfig cfg : spec.getTransitiveDeps()) {
+ for (FPID cfg : spec.getTransitives()) {
allDeps.add(cfg);
}
- for (FeaturePackConfig cfg : allDeps) {
- FeaturePack fp = toFeaturePack(cfg, pm);
+ for (FPID cfg : allDeps) {
+ FeaturePack fp = toFeaturePack(cfg.getLocation());
if (fp != null) {
String gac = fp.getGAC();
// Only add the dep if not already seen. The first installed FP dep wins.
if (!topLevels.containsKey(gac) && !dependencies.containsKey(gac)) {
// Resolve to retrieve the actual producer and map to GAC
Path p = mojo.resolveMaven(fp);
- FeaturePackSpec depSpec = FeaturePackDescriber.readSpec(p);
- producerToGAC.put(depSpec.getFPID().getProducer(), gac);
+ GalleonFeaturePackDescription depSpec = Provisioning.getFeaturePackDescription(p);
+ producerToGAC.put(depSpec.getProducer().getProducer(), gac);
dependencies.put(gac, fp);
}
}
@@ -107,7 +106,7 @@ private Map getOriginalVersions() throws ProvisioningException,
private ScannedModules getScannedModules() throws ProvisioningException, MojoExecutionException {
if (modules == null) {
- modules = ScannedModules.scanProvisionedArtifacts(pm, config);
+ modules = ScannedModules.scanProvisionedArtifacts(provisioning, originalConfig);
}
return modules;
}
@@ -194,9 +193,9 @@ private static String getOriginalArtifactVersion(OverriddenArtifact a, Map originalVersions = getOriginalVersions();
List featurePackDependencies = new ArrayList<>();
@@ -286,15 +285,15 @@ ProvisioningConfig upgrade() throws MojoExecutionException, ProvisioningDescript
}
}
if (!artifactDependencies.isEmpty() || !featurePackDependencies.isEmpty()) {
- ProvisioningConfig.Builder c = ProvisioningConfig.builder(config);
+ GalleonProvisioningConfig original = originalConfig;
+ GalleonProvisioningConfig.Builder c = GalleonProvisioningConfig.builder(original);
if (!featurePackDependencies.isEmpty()) {
mojo.getLog().info("[UPDATE] Overriding Galleon feature-pack dependency with: ");
for (FeaturePack fp : featurePackDependencies) {
- FeaturePackLocation fpl = FeaturePackLocation.fromString(fp.getMavenCoords());
mojo.getLog().info("[UPDATE] " + fp.getGroupId() + ":" + fp.getArtifactId() + ":"
+ (fp.getClassifier() == null ? "" : fp.getClassifier() + ":")
+ fp.getVersion() + (fp.getExtension() == null ? "" : ":" + fp.getExtension()));
- c.addTransitiveDep(fpl);
+ c.addTransitiveDep(FeaturePackLocation.fromString(fp.getMavenCoords()));
}
}
if (!artifactDependencies.isEmpty()) {
@@ -306,12 +305,15 @@ ProvisioningConfig upgrade() throws MojoExecutionException, ProvisioningDescript
+ (update.getClassifier() == null ? "" : update.getClassifier() + ":")
+ update.getVersion() + (update.getType() == null ? "" : ":" + update.getType()));
}
- c.addOption("jboss-overridden-artifacts", updates);
+ Map allOptions = new HashMap<>();
+ allOptions.putAll(original.getOptions());
+ allOptions.put("jboss-overridden-artifacts", updates);
+ c.addOptions(allOptions);
}
}
return c.build();
} else {
- return config;
+ return originalConfig;
}
}
@@ -338,13 +340,13 @@ static String locationWithVersion(String featurePackLocation, MavenProjectArtifa
return featurePackLocation;
}
- private FeaturePack toFeaturePack(FeaturePackConfig cfg, ProvisioningManager pm) throws MojoExecutionException {
+ private FeaturePack toFeaturePack(FeaturePackLocation fpl) throws MojoExecutionException {
FeaturePack fp;
- validateFPL(cfg.getLocation());
- if (cfg.getLocation().isMavenCoordinates()) {
- fp = getFeaturePack(cfg.getLocation().toString());
+ validateFPL(fpl);
+ if (fpl.isMavenCoordinates()) {
+ fp = getFeaturePack(fpl.toString());
} else {
- fp = getFeaturePack(cfg, pm);
+ fp = getFeaturePack(fpl);
}
return fp;
}
@@ -364,24 +366,24 @@ String getMavenFeaturePack(FeaturePackLocation.FPID location) {
}
}
- private FeaturePack getFeaturePack(FeaturePackConfig cfg, ProvisioningManager pm) {
+ private FeaturePack getFeaturePack(FeaturePackLocation fpl) {
try {
- Channel channel = pm.getLayoutFactory().getUniverseResolver().getChannel(cfg.getLocation());
+ Channel channel = provisioning.getUniverseResolver().getChannel(fpl);
if (channel instanceof MavenChannel) {
MavenChannel mavenChannel = (MavenChannel) channel;
FeaturePack fp = new FeaturePack();
fp.setGroupId(mavenChannel.getFeaturePackGroupId());
fp.setArtifactId(mavenChannel.getFeaturePackArtifactId());
- String build = cfg.getLocation().getBuild();
+ String build = fpl.getBuild();
if (build == null) {
- build = mavenChannel.getLatestBuild(cfg.getLocation());
+ build = mavenChannel.getLatestBuild(fpl);
}
fp.setVersion(build);
return fp;
}
} catch (ProvisioningException ex) {
// OK, invalid channel, can occurs for non registered FP that are referenced from GAV.
- mojo.debug("Invalid channel for %s, the feature-pack is not known in the universe, skipping it.", cfg.getLocation());
+ mojo.debug("Invalid channel for %s, the feature-pack is not known in the universe, skipping it.", fpl);
}
return null;
}
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ScannedModules.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ScannedModules.java
index 6c7154fc..e68f98d3 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ScannedModules.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ScannedModules.java
@@ -31,16 +31,16 @@
import javax.xml.parsers.ParserConfigurationException;
import org.apache.maven.plugin.MojoExecutionException;
import org.jboss.galleon.ProvisioningException;
-import org.jboss.galleon.ProvisioningManager;
-import org.jboss.galleon.config.ProvisioningConfig;
-import org.jboss.galleon.runtime.FeaturePackRuntime;
-import org.jboss.galleon.runtime.PackageRuntime;
-import org.jboss.galleon.runtime.ProvisioningRuntime;
+import org.jboss.galleon.api.GalleonFeaturePackRuntime;
+import org.jboss.galleon.api.GalleonPackageRuntime;
+import org.jboss.galleon.api.GalleonProvisioningRuntime;
+import org.jboss.galleon.api.Provisioning;
+import org.jboss.galleon.api.config.GalleonProvisioningConfig;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
-import org.wildfly.plugins.bootablejar.BootableJarSupport;
+import org.wildfly.plugin.tools.bootablejar.BootableJarSupport;
import org.xml.sax.SAXException;
final class ScannedModules {
@@ -87,13 +87,13 @@ Map getProvisionedArtifacts() {
return all;
}
- static ScannedModules scanProvisionedArtifacts(ProvisioningManager pm, ProvisioningConfig config)
+ static ScannedModules scanProvisionedArtifacts(Provisioning pm, GalleonProvisioningConfig config)
throws ProvisioningException, MojoExecutionException {
Map propsMap = new HashMap<>();
Map> perModule = new TreeMap<>();
Map copiedArtifacts = new HashMap<>();
- try (ProvisioningRuntime rt = pm.getRuntime(config)) {
- for (FeaturePackRuntime fprt : rt.getFeaturePacks()) {
+ try (GalleonProvisioningRuntime rt = pm.getProvisioningRuntime(config)) {
+ for (GalleonFeaturePackRuntime fprt : rt.getGalleonFeaturePacks()) {
Path artifactProps = fprt.getResource(BootableJarSupport.WILDFLY_ARTIFACT_VERSIONS_RESOURCE_PATH);
try {
AbstractBuildBootableJarMojo.readProperties(artifactProps, propsMap);
@@ -101,7 +101,7 @@ static ScannedModules scanProvisionedArtifacts(ProvisioningManager pm, Provision
throw new MojoExecutionException("Error reading artifact versions", ex);
}
}
- for (FeaturePackRuntime fprt : rt.getFeaturePacks()) {
+ for (GalleonFeaturePackRuntime fprt : rt.getGalleonFeaturePacks()) {
processPackages(fprt, perModule, propsMap, copiedArtifacts);
}
}
@@ -113,12 +113,12 @@ static ScannedModules scanProvisionedArtifacts(ProvisioningManager pm, Provision
return new ScannedModules(perModule, MODULE_RUNTIME_KEY, moduleRuntimeValue, copiedArtifacts);
}
- private static void processPackages(final FeaturePackRuntime fp,
+ private static void processPackages(final GalleonFeaturePackRuntime fp,
Map> perModule,
Map propsMap,
Map copiedArtifacts) throws ProvisioningException {
- Map jbossModules = new HashMap<>();
- for (PackageRuntime pkg : fp.getPackages()) {
+ Map jbossModules = new HashMap<>();
+ for (GalleonPackageRuntime pkg : fp.getGalleonPackages()) {
final Path pmWfDir = pkg.getResource(PM, WILDFLY);
if (!Files.exists(pmWfDir)) {
continue;
@@ -132,18 +132,18 @@ private static void processPackages(final FeaturePackRuntime fp,
processTasks(pkg, tasks, propsMap, copiedArtifacts);
}
}
- for (Map.Entry entry : jbossModules.entrySet()) {
- final PackageRuntime pkg = entry.getValue();
+ for (Map.Entry entry : jbossModules.entrySet()) {
+ final GalleonPackageRuntime pkg = entry.getValue();
try {
processModuleTemplate(pkg, entry.getKey(), perModule, propsMap);
} catch (IOException | ParserConfigurationException | ProvisioningException | SAXException e) {
throw new ProvisioningException("Failed to process JBoss module XML template for feature-pack "
- + pkg.getFeaturePackRuntime().getFPID() + " package " + pkg.getName(), e);
+ + pkg.getFeaturePackFPID() + " package " + pkg.getName(), e);
}
}
}
- private static void processTasks(PackageRuntime pkg, Path tasks, Map propsMap,
+ private static void processTasks(GalleonPackageRuntime pkg, Path tasks, Map propsMap,
Map artifacts) throws ProvisioningException {
try {
try (InputStream reader = Files.newInputStream(tasks)) {
@@ -175,12 +175,12 @@ private static void processTasks(PackageRuntime pkg, Path tasks, Map jbossModules) throws ProvisioningException {
+ private static void processModules(GalleonPackageRuntime pkg, Path fpModuleDir,
+ Map jbossModules) throws ProvisioningException {
try {
Files.walkFileTree(fpModuleDir, new SimpleFileVisitor() {
@Override
@@ -194,11 +194,11 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
});
} catch (IOException e) {
throw new ProvisioningException("Failed to process modules from package " + pkg.getName()
- + " from feature-pack " + pkg.getFeaturePackRuntime().getFPID(), e);
+ + " from feature-pack " + pkg.getFeaturePackFPID(), e);
}
}
- private static void processModuleTemplate(PackageRuntime pkg, Path moduleXmlRelativePath,
+ private static void processModuleTemplate(GalleonPackageRuntime pkg, Path moduleXmlRelativePath,
Map> perModule, Map propsMap) throws ProvisioningException, IOException, ParserConfigurationException, SAXException {
final Path moduleTemplate = pkg.getResource(PM, WILDFLY, MODULE).resolve(moduleXmlRelativePath);
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ShutdownBootableJarMojo.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ShutdownBootableJarMojo.java
index 86e0b5bf..22b6837b 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ShutdownBootableJarMojo.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/ShutdownBootableJarMojo.java
@@ -24,7 +24,7 @@
import org.apache.maven.project.MavenProject;
import org.jboss.as.controller.client.ModelControllerClient;
import org.wildfly.plugin.common.AbstractServerConnection;
-import org.wildfly.plugin.core.ServerHelper;
+import org.wildfly.plugin.tools.ServerHelper;
/**
* Shutdown the bootable JAR. In order to be able to shutdown a running server,
diff --git a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/StartBootableJarMojo.java b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/StartBootableJarMojo.java
index 2089855f..833f7156 100644
--- a/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/StartBootableJarMojo.java
+++ b/plugin/src/main/java/org/wildfly/plugins/bootablejar/maven/goals/StartBootableJarMojo.java
@@ -30,7 +30,7 @@
import org.wildfly.core.launcher.BootableJarCommandBuilder;
import org.wildfly.core.launcher.Launcher;
import org.wildfly.plugin.common.AbstractServerConnection;
-import org.wildfly.plugin.core.ServerHelper;
+import org.wildfly.plugin.tools.ServerHelper;
import org.wildfly.plugins.bootablejar.maven.common.Utils;
/**
diff --git a/plugin/src/test/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfigurationTestCase.java b/plugin/src/test/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfigurationTestCase.java
deleted file mode 100644
index 4e8dfe9f..00000000
--- a/plugin/src/test/java/org/wildfly/plugins/bootablejar/maven/goals/BootLoggingConfigurationTestCase.java
+++ /dev/null
@@ -1,865 +0,0 @@
-/*
- * JBoss, Home of Professional Open Source.
- *
- * Copyright 2020 Red Hat, Inc., and individual contributors
- * as indicated by the @author tags.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.wildfly.plugins.bootablejar.maven.goals;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Deque;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.regex.Pattern;
-
-import org.jboss.as.controller.client.ModelControllerClient;
-import org.jboss.as.controller.client.Operation;
-import org.jboss.as.controller.client.helpers.ClientConstants;
-import org.jboss.as.controller.client.helpers.Operations;
-import org.jboss.as.controller.client.helpers.Operations.CompositeOperationBuilder;
-import org.jboss.dmr.ModelNode;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.wildfly.core.launcher.Launcher;
-import org.wildfly.core.launcher.StandaloneCommandBuilder;
-import org.wildfly.plugin.core.ServerHelper;
-
-/**
- * @author James R. Perkins
- */
-public class BootLoggingConfigurationTestCase {
-
- private static final Pattern EXPRESSION_PATTERN = Pattern.compile(".*\\$\\{.*}.*");
- private static Process currentProcess;
- private static Path stdout;
- private static ModelControllerClient client;
-
- @Rule
- public TestName testName = new TestName();
-
- private final Deque tearDownOps = new ArrayDeque<>();
- private Path tmpDir;
-
- @BeforeClass
- public static void startWildFly() throws Exception {
- stdout = Files.createTempFile("stdout-", ".log");
- final StandaloneCommandBuilder builder = StandaloneCommandBuilder.of(TestEnvironment.getJBossHome())
- .addJavaOptions(TestEnvironment.getJvmArgs());
- currentProcess = Launcher.of(builder)
- .setRedirectErrorStream(true)
- .redirectOutput(stdout)
- .launch();
- client = ModelControllerClient.Factory.create(TestEnvironment.getHost(), TestEnvironment.getManagementPort());
- // Wait for standalone to start
- ServerHelper.waitForStandalone(currentProcess, client, TestEnvironment.getTimeout());
- Assert.assertTrue(String.format("Standalone server is not running:%n%s", getLog()), ServerHelper.isStandaloneRunning(client));
- }
-
- @AfterClass
- public static void shutdown() throws Exception {
- if (client != null) {
- ServerHelper.shutdownStandalone(client);
- client.close();
- }
- if (currentProcess != null) {
- if (!currentProcess.waitFor(TestEnvironment.getTimeout(), TimeUnit.SECONDS)) {
- currentProcess.destroyForcibly();
- }
- }
- }
-
- @Before
- public void setup() throws Exception {
- tmpDir = TestEnvironment.createTempPath("test-config", testName.getMethodName());
- if (Files.notExists(tmpDir)) {
- Files.createDirectories(tmpDir);
- }
- }
-
- @After
- public void cleanUp() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
- ModelNode op;
- while ((op = tearDownOps.pollFirst()) != null) {
- builder.addStep(op);
- }
- executeOperation(builder.build());
- }
-
- @Test
- public void testDefault() throws Exception {
- generateAndTest();
- }
-
- @Test
- public void testAsyncHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Add a file handler
- final ModelNode fileHandler = createLoggingAddress("file-handler", "test-file");
- ModelNode op = Operations.createAddOperation(fileHandler);
- op.get("named-formatter").set("PATTERN");
- op.get("append").set(true);
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test-file.log");
- builder.addStep(op);
-
- // Add the async handler
- final ModelNode asyncAddress = createLoggingAddress("async-handler", "async");
- op = Operations.createAddOperation(asyncAddress);
- op.get("overflow-action").set("DISCARD");
- op.get("queue-length").set(5000);
- final ModelNode subhandlers = op.get("subhandlers").setEmptyList();
- subhandlers.add("test-file");
- builder.addStep(op);
-
- // Add the handler to the root-logger
- builder.addStep(createAddHandlerOp("async"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(asyncAddress));
- tearDownOps.add(Operations.createRemoveOperation(fileHandler));
- generateAndTest();
- }
-
- @Test
- public void testDefaultConsole() throws Exception {
- final ModelNode address = createLoggingAddress("console-handler", "new-handler");
- // Just do a raw add which will add the default formatter rather than a named-formatter
- executeOperation(Operations.createAddOperation(address));
- tearDownOps.add(Operations.createRemoveOperation(address));
- generateAndTest();
- }
-
- @Test
- public void testCustomHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- final ModelNode formatterAddress = createLoggingAddress("custom-formatter", "json");
- ModelNode op = Operations.createAddOperation(formatterAddress);
- op.get("class").set("org.jboss.logmanager.formatters.JsonFormatter");
- op.get("module").set("org.jboss.logmanager");
- ModelNode properties = op.get("properties");
- properties.get("prettyPrint").set("true");
- properties.get("recordDelimiter").set("|");
- builder.addStep(op);
-
-
- final ModelNode handlerAddress = createLoggingAddress("custom-handler", "custom-console");
- op = Operations.createAddOperation(handlerAddress);
- op.get("class").set("org.jboss.logmanager.handlers.ConsoleHandler");
- op.get("module").set("org.jboss.logmanager");
- op.get("named-formatter").set("json");
- properties = op.get("properties");
- properties.get("target").set("SYSTEM_ERR");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("custom-console"));
-
- executeOperation(builder.build());
- // Create the tear down ops
- tearDownOps.addLast(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.addLast(Operations.createRemoveOperation(formatterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testCustomHandlerNoProperties() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- final ModelNode formatterAddress = createLoggingAddress("custom-formatter", "json");
- ModelNode op = Operations.createAddOperation(formatterAddress);
- op.get("class").set("org.jboss.logmanager.formatters.JsonFormatter");
- op.get("module").set("org.jboss.logmanager");
- builder.addStep(op);
-
-
- final ModelNode handlerAddress = createLoggingAddress("custom-handler", "custom-console");
- op = Operations.createAddOperation(handlerAddress);
- op.get("class").set("org.jboss.logmanager.handlers.ConsoleHandler");
- op.get("module").set("org.jboss.logmanager");
- op.get("named-formatter").set("json");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("custom-console"));
-
- executeOperation(builder.build());
- // Create the tear down ops
- tearDownOps.addLast(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.addLast(Operations.createRemoveOperation(formatterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testPeriodicRotatingFileHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create a handler to assign the formatter to
- final ModelNode handlerAddress = createLoggingAddress("periodic-rotating-file-handler", "new-file");
- final ModelNode op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("PATTERN");
- op.get("suffix").set(".yyyy-MM-dd");
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("new-file"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testPeriodicSizeRotatingFileHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create a handler to assign the formatter to
- final ModelNode handlerAddress = createLoggingAddress("periodic-size-rotating-file-handler", "new-file");
- final ModelNode op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("PATTERN");
- op.get("suffix").set(".yyyy-MM-dd");
- op.get("rotate-on-boot").set(false);
- op.get("rotate-size").set("${test.rotate.size:50M}");
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("new-file"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testSizeRotatingFileHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create a handler to assign the formatter to
- final ModelNode handlerAddress = createLoggingAddress("size-rotating-file-handler", "new-file");
- final ModelNode op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("PATTERN");
- op.get("rotate-on-boot").set(false);
- op.get("rotate-size").set("50M");
- op.get("max-backup-index").set(100);
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("new-file"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
-
- generateAndTest();
- }
-
- @Test
- @Ignore("This test is failing on CI. See WFCORE-5155.")
- public void testSocketHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Add the socket binding
- final ModelNode socketBindingAddress = Operations.createAddress("socket-binding-group", "standard-sockets",
- "remote-destination-outbound-socket-binding", "log-server");
- ModelNode op = Operations.createAddOperation(socketBindingAddress);
- op.get("host").set(TestEnvironment.getHost());
- op.get("port").set(TestEnvironment.getLogServerPort());
- builder.addStep(op);
-
- // Add a socket handler
- final ModelNode address = createLoggingAddress("socket-handler", "socket");
- op = Operations.createAddOperation(address);
- op.get("named-formatter").set("PATTERN");
- op.get("outbound-socket-binding-ref").set("log-server");
- builder.addStep(op);
-
- // Add the handler to the root-logger
- builder.addStep(createAddHandlerOp("socket"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(address));
- tearDownOps.add(Operations.createRemoveOperation(socketBindingAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testSyslogHandler() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Add a socket handler
- final ModelNode address = createLoggingAddress("syslog-handler", "syslog");
- final ModelNode op = Operations.createAddOperation(address);
- op.get("app-name").set("test-app");
- op.get("enabled").set(false);
- op.get("facility").set("local-use-0");
- op.get("hostname").set(TestEnvironment.getHost());
- op.get("level").set("WARN");
- op.get("named-formatter").set("PATTERN");
- op.get("port").set(TestEnvironment.getLogServerPort());
- builder.addStep(op);
-
- // Add the handler to the root-logger
- builder.addStep(createAddHandlerOp("syslog"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(address));
-
- generateAndTest();
- }
-
- @Test
- public void testFilter() throws Exception {
- final ModelNode filterAddress = createLoggingAddress("filter", "testFilter");
- ModelNode op = Operations.createAddOperation(filterAddress);
- op.get("class").set(TestFilter.class.getName());
- op.get("module").set("org.wildfly.plugins.bootablejar.maven.goals");
- final ModelNode constructorProperties = op.get("constructor-properties");
- constructorProperties.get("constructorText").set(" | constructor property text");
- final ModelNode properties = op.get("properties");
- properties.get("propertyText").set(" | property text");
- executeOperation(op);
- tearDownOps.add(Operations.createRemoveOperation(filterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testFilterNoProperties() throws Exception {
- final ModelNode filterAddress = createLoggingAddress("filter", "testFilter");
- ModelNode op = Operations.createAddOperation(filterAddress);
- op.get("class").set(TestFilter.class.getName());
- op.get("module").set("org.wildfly.plugins.bootablejar.maven.goals");
- executeOperation(op);
- tearDownOps.add(Operations.createRemoveOperation(filterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testJsonFormatter() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- final ModelNode formatterAddress = createLoggingAddress("json-formatter", "json");
- ModelNode op = Operations.createAddOperation(formatterAddress);
- op.get("pretty-print").set(false);
- op.get("exception-output-type").set("${test.type:formatted}");
- op.get("date-format").set("yyyy-MM-dd'T'HH:mm:SSSZ");
-
- final ModelNode keyOverrides = op.get("key-overrides").setEmptyObject();
- keyOverrides.get("message").set("msg");
- keyOverrides.get("stack-trace").set("cause");
-
- final ModelNode metaData = op.get("meta-data").setEmptyObject();
- metaData.get("app-name").set("test");
- metaData.get("@version").set("1");
-
- op.get("print-details").set(true);
- op.get("record-delimiter").set("\n");
- op.get("zone-id").set("GMT");
- builder.addStep(op);
-
- // Create a handler to assign the formatter to
- final ModelNode handlerAddress = createLoggingAddress("file-handler", "json-file");
- op = Operations.createAddOperation(handlerAddress);
- op.get("append").set(false);
- op.get("level").set("DEBUG");
- op.get("named-formatter").set("json");
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test-json.log");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("json-file"));
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.add(Operations.createRemoveOperation(formatterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testPatternFormatter() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- final ModelNode formatterAddress = createLoggingAddress("pattern-formatter", "new-pattern");
- ModelNode op = Operations.createAddOperation(formatterAddress);
- op.get("pattern").set("[test] %d{HH:mm:ss,SSS} %-5p [%c] %s%e%n");
- op.get("color-map").set("info:blue,warn:yellow,error:red,debug:cyan");
- builder.addStep(op);
-
- // Create a handler to assign the formatter to
- final ModelNode handlerAddress = createLoggingAddress("file-handler", "new-file");
- op = Operations.createAddOperation(handlerAddress);
- op.get("append").set(false);
- op.get("encoding").set("ISO-8859-1");
- op.get("level").set("DEBUG");
- op.get("filter-spec").set("any(accept,match(\".*\"))");
- op.get("named-formatter").set("new-pattern");
- final ModelNode file = op.get("file");
- file.get("relative-to").set("jboss.server.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.add(Operations.createRemoveOperation(formatterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testLogger() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Add a filter for the logger
- final ModelNode filterAddress = createLoggingAddress("filter", "testFilter");
- ModelNode op = Operations.createAddOperation(filterAddress);
- op.get("class").set(TestFilter.class.getName());
- op.get("module").set("org.wildfly.plugins.bootablejar.maven.goals");
- builder.addStep(op);
-
- // Add a formatter for the handler
- final ModelNode formatterAddress = createLoggingAddress("pattern-formatter", "custom-formatter");
- op = Operations.createAddOperation(formatterAddress);
- op.get("pattern").set("[%X{debug.token} %K{level}%d{HH:mm:ss,SSS} %-5p [%c] (%t) %s%e%n");
- builder.addStep(op);
-
- // Add a handler for the logger
- final ModelNode handlerAddress = createLoggingAddress("console-handler", "custom-console");
- op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("custom-formatter");
- builder.addStep(op);
-
- // Create the logger
- final ModelNode loggerAddress = createLoggingAddress("logger", "org.jboss.as");
- op = Operations.createAddOperation(loggerAddress);
- op.get("level").set("${test.level:DEBUG}");
- op.get("use-parent-handlers").set(false);
- op.get("filter-spec").set("all(testFilter)");
- final ModelNode handlers = op.get("handlers").setEmptyList();
- handlers.add("custom-console");
- builder.addStep(op);
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(loggerAddress));
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.add(Operations.createRemoveOperation(formatterAddress));
- tearDownOps.add(Operations.createRemoveOperation(filterAddress));
-
- generateAndTest();
- }
-
- @Test
- public void testWithProperties() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create some expected properties
- final Properties expectedProperties = new Properties();
- expectedProperties.setProperty("test.level", "TRACE");
- expectedProperties.setProperty("test.rotate-on-boot", "true");
- expectedProperties.setProperty("test.pretty.print", "true");
- expectedProperties.setProperty("test.exception-output-type", "formatted");
- expectedProperties.setProperty("test.zone.id", "UTC");
- expectedProperties.setProperty("test.dir", System.getProperty("java.io.tmpdir"));
-
- // Add the system properties
- for (String key : expectedProperties.stringPropertyNames()) {
- final ModelNode address = Operations.createAddress("system-property", key);
- final ModelNode op = Operations.createAddOperation(address);
- op.get("value").set(expectedProperties.getProperty(key));
- builder.addStep(op);
- }
- // Add a path and set this after
- final ModelNode tmpPathAddress = Operations.createAddress("path", "custom.log.dir");
- ModelNode op = Operations.createAddOperation(tmpPathAddress);
- op.get("path").set("${test.dir}");
- builder.addStep(op);
-
- final ModelNode logPathAddress = Operations.createAddress("path", "test.log.dir");
- op = Operations.createAddOperation(logPathAddress);
- op.get("relative-to").set("custom.log.dir");
- op.get("path").set("logs");
- builder.addStep(op);
-
- // Add one property that won't be used so it shouldn't end up in the boot-config.properties
- final ModelNode sysPropAddress = Operations.createAddress("system-property", "unused.property");
- op = Operations.createAddOperation(sysPropAddress);
- op.get("value").set("not used");
- builder.addStep(op);
- tearDownOps.add(Operations.createRemoveOperation(sysPropAddress));
-
- // Create a formatter
- final ModelNode formatterAddress = createLoggingAddress("json-formatter", "json");
- op = Operations.createAddOperation(formatterAddress);
- op.get("pretty-print").set("${test.pretty.print:false}");
- op.get("exception-output-type").set("${test.exception-output-type:detailed}");
- op.get("zone-id").set("${test.zone.id:GMT}");
- builder.addStep(op);
-
- // Create a file handler
- final ModelNode handlerAddress = createLoggingAddress("size-rotating-file-handler", "json-file");
- op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("json");
- op.get("rotate-on-boot").set("${test.rotate-on-boot:false}");
- op.get("rotate-size").set("50M");
- op.get("max-backup-index").set(100);
- final ModelNode file = op.get("file");
- file.get("relative-to").set("test.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
- // We don't actually expect the custom.log.dir property here as it should be written to the file as
- // ${test.dir}/${test.log.dir}/test.log
- expectedProperties.setProperty("test.log.dir", "logs");
-
- // Create a logger
- final ModelNode loggerAddress = createLoggingAddress("logger", "org.wildfly.core");
- op = Operations.createAddOperation(loggerAddress);
- op.get("level").set("${test.level:INFO}");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("json-file"));
-
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(loggerAddress));
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.add(Operations.createRemoveOperation(formatterAddress));
- tearDownOps.add(Operations.createRemoveOperation(logPathAddress));
- tearDownOps.add(Operations.createRemoveOperation(tmpPathAddress));
-
- // Remove all the properties last
- for (String name : expectedProperties.stringPropertyNames()) {
- // test.log.dir isn't an actual system property
- if ("test.log.dir".equals(name)) continue;
- final ModelNode address = Operations.createAddress("system-property", name);
- tearDownOps.addLast(Operations.createRemoveOperation(address));
- }
-
- generateAndTest(expectedProperties);
- }
-
- @Test
- public void testNestedPaths() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create some expected properties
- final Properties expectedProperties = new Properties();
- // Add a path and set this after
- final ModelNode tmpPathAddress = Operations.createAddress("path", "custom.log.dir");
- ModelNode op = Operations.createAddOperation(tmpPathAddress);
- op.get("path").set("custom-logs");
- op.get("relative-to").set("jboss.server.log.dir");
- builder.addStep(op);
-
- final ModelNode logPathAddress = Operations.createAddress("path", "test.log.dir");
- op = Operations.createAddOperation(logPathAddress);
- op.get("relative-to").set("custom.log.dir");
- op.get("path").set("logs");
- builder.addStep(op);
- expectedProperties.setProperty("custom.log.dir", "custom-logs");
- expectedProperties.setProperty("test.log.dir", "logs");
-
- // Create a file handler
- final ModelNode handlerAddress = createLoggingAddress("file-handler", "test-file");
- op = Operations.createAddOperation(handlerAddress);
- op.get("named-formatter").set("PATTERN");
- final ModelNode file = op.get("file");
- file.get("relative-to").set("test.log.dir");
- file.get("path").set("test.log");
- builder.addStep(op);
-
- builder.addStep(createAddHandlerOp("test-file"));
-
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(handlerAddress));
- tearDownOps.add(Operations.createRemoveOperation(logPathAddress));
- tearDownOps.add(Operations.createRemoveOperation(tmpPathAddress));
-
- generateAndTest(expectedProperties);
- }
-
- @Test
- public void testMultiKeyExpression() throws Exception {
- final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
-
- // Create some expected properties
- final Properties expectedProperties = new Properties();
- expectedProperties.setProperty("test.prod.level", "INFO");
- expectedProperties.setProperty("test.min.level", "WARN");
-
- // Add the system properties
- for (String key : expectedProperties.stringPropertyNames()) {
- final ModelNode address = Operations.createAddress("system-property", key);
- final ModelNode op = Operations.createAddOperation(address);
- op.get("value").set(expectedProperties.getProperty(key));
- builder.addStep(op);
- tearDownOps.add(Operations.createRemoveOperation(address));
- }
-
- // Create a logger to set the level on
- final ModelNode address = createLoggingAddress("logger", BootLoggingConfigurationTestCase.class.getName());
- final ModelNode op = Operations.createAddOperation(address);
- op.get("level").set("${test.dev.level,test.prod.level,test.min.level:DEBUG}");
- builder.addStep(op);
-
- executeOperation(builder.build());
- tearDownOps.add(Operations.createRemoveOperation(address));
-
- generateAndTest(expectedProperties);
- }
-
- private void generateAndTest() throws Exception {
- generateAndTest(null);
- }
-
- private void generateAndTest(final Properties expectedBootConfig) throws Exception {
- final BootLoggingConfiguration config = new BootLoggingConfiguration();
- // @TODO, we can't use AbstractLogEnabled, it is not in the maven plugin classloader.
- //config.enableLogging(TestLogger.getLogger(BootLoggingConfigurationTestCase.class));
- config.generate(tmpDir, client);
- compare(load(findLoggingConfig(), true, true),
- load(tmpDir.resolve("logging.properties"), false, true), true);
- final Path bootConfig = tmpDir.resolve("boot-config.properties");
- if (expectedBootConfig == null) {
- // The file should not exist
- Assert.assertTrue("Expected " + bootConfig + " not to exist", Files.notExists(bootConfig));
- } else {
- compare(expectedBootConfig, load(bootConfig, false, false), false);
- }
- }
-
- private ModelNode createAddHandlerOp(final String handlerName) {
- final ModelNode address = createLoggingAddress("root-logger", "ROOT");
- // Create the remove op first
- ModelNode op = Operations.createOperation("remove-handler", address);
- op.get("name").set(handlerName);
- tearDownOps.addFirst(op);
-
- // Create the add op
- op = Operations.createOperation("add-handler", address);
- op.get("name").set(handlerName);
- return op;
- }
-
- private Path findLoggingConfig() throws IOException {
- final Path serverLogConfig = TestEnvironment.getJBossHome().resolve("standalone").resolve("configuration")
- .resolve("logging.properties");
- Assert.assertTrue("Could find config file " + serverLogConfig, Files.exists(serverLogConfig));
- return Files.copy(serverLogConfig, tmpDir.resolve("server-logging.properties"), StandardCopyOption.REPLACE_EXISTING);
- }
-
- private static ModelNode createLoggingAddress(final String... parts) {
- final Collection addresses = new ArrayList<>();
- addresses.add("subsystem");
- addresses.add("logging");
- Collections.addAll(addresses, parts);
- return Operations.createAddress(addresses);
- }
-
- private static ModelNode executeOperation(final ModelNode op) throws IOException {
- return executeOperation(Operation.Factory.create(op));
- }
-
- private static ModelNode executeOperation(final Operation op) throws IOException {
- final ModelNode result = client.execute(op);
- if (!Operations.isSuccessfulOutcome(result)) {
- Assert.fail(String.format("Operation %s failed: %s", op.getOperation(), Operations.getFailureDescription(result).asString()));
- }
- // Reload if required
- if (result.hasDefined(ClientConstants.RESPONSE_HEADERS)) {
- final ModelNode responseHeaders = result.get(ClientConstants.RESPONSE_HEADERS);
- if (responseHeaders.hasDefined("process-state")) {
- if (ClientConstants.CONTROLLER_PROCESS_STATE_RELOAD_REQUIRED.equals(responseHeaders.get("process-state").asString())) {
- executeOperation(Operations.createOperation("reload"));
- try {
- ServerHelper.waitForStandalone(currentProcess, client, TestEnvironment.getTimeout());
- } catch (InterruptedException | TimeoutException e) {
- e.printStackTrace();
- Assert.fail("Reloading the server failed: " + e.getLocalizedMessage());
- }
- }
- }
- }
- return Operations.readResult(result);
- }
-
- private static String getLog() throws IOException {
- final StringBuilder result = new StringBuilder();
- Files.readAllLines(stdout, StandardCharsets.UTF_8).forEach(line -> result.append(line).append(System.lineSeparator()));
- return result.toString();
- }
-
- private static void compare(final Properties expected, final Properties found, final boolean resolveExpressions) throws IOException {
- compareKeys(expected, found);
- compareValues(expected, found, resolveExpressions);
- }
-
- private static void compareKeys(final Properties expected, final Properties found) {
- final Set expectedKeys = new TreeSet<>(expected.stringPropertyNames());
- final Set foundKeys = new TreeSet<>(found.stringPropertyNames());
- // Find the missing expected keys
- final Set missing = new TreeSet<>(expectedKeys);
- missing.removeAll(foundKeys);
- Assert.assertTrue("Missing the following keys in the generated file: " + missing.toString(),
- missing.isEmpty());
-
- // Find additional keys
- missing.addAll(foundKeys);
- missing.removeAll(expectedKeys);
- Assert.assertTrue("Found the following extra keys in the generated file: " + missing.toString(),
- missing.isEmpty());
- }
-
- private static void compareValues(final Properties expected, final Properties found, final boolean resolveExpressions) throws IOException {
- final Set keys = new TreeSet<>(expected.stringPropertyNames());
- for (String key : keys) {
- final String expectedValue = expected.getProperty(key);
- final String foundValue = found.getProperty(key);
- if (key.endsWith("fileName")) {
- final Path foundFileName = resolvePath(foundValue);
- Assert.assertEquals(Paths.get(expectedValue).normalize(), foundFileName);
- } else {
- if (expectedValue.contains(",")) {
- // Assume the values are a list
- final List expectedValues = stringToList(expectedValue);
- final List foundValues = stringToList(foundValue);
- Assert.assertEquals(String.format("Found %s expected %s", foundValues, expectedValues), expectedValues, foundValues);
- } else {
- if (resolveExpressions && EXPRESSION_PATTERN.matcher(foundValue).matches()) {
- String resolvedValue = resolveExpression(foundValue);
- // Handle some special cases
- if ("formatted".equals(resolvedValue)) {
- resolvedValue = resolvedValue.toUpperCase();
- }
- Assert.assertEquals(expectedValue, resolvedValue);
- } else {
- Assert.assertEquals(expectedValue, foundValue);
- }
- }
- }
- }
- }
-
- private static List stringToList(final String value) {
- final List result = new ArrayList<>();
- Collections.addAll(result, value.split(","));
- Collections.sort(result);
- return result;
- }
-
- private static Properties load(final Path path, final boolean expected, final boolean filter) throws IOException {
- final Properties result = new Properties();
- try (BufferedReader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8)) {
- result.load(reader);
- }
- if (filter) {
- if (expected) {
- result.remove("handlers");
- result.remove("formatters");
- result.remove("filters");
- } else {
- // For some reason the default console-handler and periodic-rotating-file-handler don't persist the enabled
- // attribute.
- for (String key : result.stringPropertyNames()) {
- if (key.equals("handler.CONSOLE.enabled") || key.equals("handler.FILE.enabled")) {
- result.remove(key);
- final String propertiesKey = resolvePrefix(key) + ".properties";
- final String value = result.getProperty(propertiesKey);
- if (value != null) {
- if ("enabled".equals(value)) {
- result.remove(propertiesKey);
- } else {
- result.setProperty(propertiesKey, value.replace("enabled,", "").replace(",enabled", ""));
- }
- }
- }
- }
- }
- }
- return result;
- }
-
- private static String resolvePrefix(final String key) {
- final int i = key.lastIndexOf('.');
- if (i > 0) {
- return key.substring(0, i);
- }
- return key;
- }
-
- private static Path resolvePath(final String path) throws IOException {
- Path resolved = Paths.get(path);
- if (EXPRESSION_PATTERN.matcher(path).matches()) {
- // For testing purposes we're just going to use the last entry which should be a path entry
- final LinkedList expressions = new LinkedList<>(Expression.parse(path));
- Assert.assertFalse("The path could not be resolved: " + path, expressions.isEmpty());
- final Expression expression = expressions.getLast();
- // We're assuming we only have one key entry which for testing purposes should be okay
- final ModelNode op = Operations.createOperation("path-info",
- Operations.createAddress("path", expression.getKeys().get(0)));
- final ModelNode result = client.execute(op);
- if (!Operations.isSuccessfulOutcome(result)) {
- Assert.fail(Operations.getFailureDescription(result).asString());
- }
- final ModelNode pathInfo = Operations.readResult(result);
- final String resolvedPath = pathInfo.get("path", "resolved-path").asString();
- resolved = Paths.get(resolvedPath, resolved.getFileName().toString());
- }
- return resolved.normalize();
- }
-
- private static String resolveExpression(final String value) throws IOException {
- // Resolve the expression
- ModelNode op = Operations.createOperation("resolve-expression");
- op.get("expression").set(value);
- return executeOperation(op).asString();
- }
-}
diff --git a/plugin/src/test/modules/org/wildfly/plugins/bootablejar/maven/goals/main/module.xml b/plugin/src/test/modules/org/wildfly/plugins/bootablejar/maven/goals/main/module.xml
deleted file mode 100644
index b965ebef..00000000
--- a/plugin/src/test/modules/org/wildfly/plugins/bootablejar/maven/goals/main/module.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/pom.xml b/pom.xml
index 0a6f2ac7..9ed3de36 100644
--- a/pom.xml
+++ b/pom.xml
@@ -28,7 +28,7 @@
org.wildfly.plugins
wildfly-jar-parent
- 10.0.1.Final-SNAPSHOT
+ 11.0.0.Beta1-SNAPSHOT
pom
WildFly Bootable JAR Plugin Parent
@@ -42,7 +42,6 @@
- core
docs
plugin
tests
@@ -54,7 +53,7 @@
11
11
- 30.0.0.Final
+ 30.0.1.Final
main
30.0
@@ -65,10 +64,11 @@
3.7.0
3.7.0
2.0.0
- 5.2.2.Final
- 22.0.1.Final
+ 6.0.0.Beta1
+ 22.0.2.Final
1.6.0.Final
- 4.2.1.Final
+ 5.0.0.Beta1
+ 1.0.0.Beta1
1.0.5.Final
1.2.0.Final
3.0.0-M6
@@ -228,13 +228,7 @@
org.jboss.galleon
- galleon-core
- ${version.org.jboss.galleon}
-
-
-
- org.jboss.galleon
- galleon-maven-universe
+ galleon-api
${version.org.jboss.galleon}
@@ -280,6 +274,12 @@
${version.org.wildfly.plugins.wildfly-maven-plugin}