diff --git a/webapp/sources/Dockerfile b/webapp/sources/Dockerfile
index 50b54c781cd..fd7b7b15df9 100644
--- a/webapp/sources/Dockerfile
+++ b/webapp/sources/Dockerfile
@@ -11,5 +11,7 @@ RUN ./user.sh $USER_ID && \
# We need a recent node
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /usr/share/keyrings/nodesource.gpg
RUN echo "deb [arch=amd64 signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list > /dev/null
-
+
RUN apt-get update && apt-get install -y nodejs
+
+USER jenkins
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/domain/TechniqueCategory.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/domain/TechniqueCategory.scala
index cb90500195c..bab5fabdbab 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/domain/TechniqueCategory.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/domain/TechniqueCategory.scala
@@ -37,7 +37,11 @@
package com.normation.cfclerk.domain
+import com.normation.rudder.domain.policies.ActiveTechniqueCategory
+import com.normation.rudder.domain.policies.ActiveTechniqueCategoryId
import scala.collection.SortedSet
+import scala.xml.Elem
+import zio.json.*
/**
* A policy category name.
@@ -47,6 +51,53 @@ import scala.collection.SortedSet
*/
final case class TechniqueCategoryName(value: String) extends AnyVal
+/*
+ * Just the name / description of a technique category without all the
+ * parent / subcategories / techniques stuff.
+ */
+final case class TechniqueCategoryMetadata(name: String, description: String, isSystem: Boolean)
+
+object TechniqueCategoryMetadata {
+ implicit val codecTechniqueCategoryMetadata: JsonCodec[TechniqueCategoryMetadata] = DeriveJsonCodec.gen
+
+ implicit class ToActiveTechniqueCategory(metadata: TechniqueCategoryMetadata) {
+ def toActiveTechniqueCategory(id: ActiveTechniqueCategoryId): ActiveTechniqueCategory = ActiveTechniqueCategory(
+ id,
+ metadata.name,
+ metadata.description,
+ Nil,
+ Nil
+ )
+
+ def toXml: Elem = {
+
+ {metadata.name}
+ {metadata.description}
+ {if (metadata.isSystem) true else xml.NodeSeq.Empty}
+
+ }
+ }
+
+ def parseXML(xml: Elem, defaultName: String): TechniqueCategoryMetadata = {
+ def nonEmpty(s: String): Option[String] = {
+ s match {
+ case null | "" => None
+ case _ => Some(s)
+ }
+ }
+
+ val name = nonEmpty((xml \\ "name").text).getOrElse(defaultName)
+ val description = nonEmpty((xml \\ "description").text).getOrElse("")
+ val isSystem = (nonEmpty((xml \\ "system").text).getOrElse("false")).equalsIgnoreCase("true")
+
+ TechniqueCategoryMetadata(name, description, isSystem = isSystem)
+ }
+
+ // the default file name for category metadata.
+ val FILE_NAME_XML = "category.xml"
+ val FILE_NAME_JSON = "category.json"
+}
+
sealed abstract class TechniqueCategoryId(val name: TechniqueCategoryName) {
/**
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/services/impl/GitTechniqueReader.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/services/impl/GitTechniqueReader.scala
index d858aaebd44..a31f604c800 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/services/impl/GitTechniqueReader.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/cfclerk/services/impl/GitTechniqueReader.scala
@@ -931,26 +931,17 @@ class GitTechniqueReader(
parseDescriptor: Boolean // that option is a success optimization for the case diff between old/new commit
): IOResult[TechniqueCategory] = {
- def nonEmpty(s: String): Option[String] = {
- s match {
- case null | "" => None
- case _ => Some(s)
- }
- }
- def parse(db: Repository, parseDesc: Boolean, catId: TechniqueCategoryId): IOResult[(String, String, Boolean)] = {
+ def parse(db: Repository, parseDesc: Boolean, catId: TechniqueCategoryId): IOResult[TechniqueCategoryMetadata] = {
if (parseDesc) {
val managedStream =
ZIO.acquireRelease(IOResult.attempt(db.open(descriptorObjectId).openStream))(is => effectUioUnit(is.close()))
for {
xml <- loadDescriptorFile(managedStream, filePath)
} yield {
- val name = nonEmpty((xml \\ "name").text).getOrElse(catId.name.value)
- val description = nonEmpty((xml \\ "description").text).getOrElse("")
- val isSystem = (nonEmpty((xml \\ "system").text).getOrElse("false")).equalsIgnoreCase("true")
- (name, description, isSystem)
+ TechniqueCategoryMetadata.parseXML(xml, catId.name.value)
}
} else {
- (catId.name.value, "", false).succeed
+ TechniqueCategoryMetadata(catId.name.value, "", false).succeed
}
}
@@ -961,7 +952,7 @@ class GitTechniqueReader(
for {
triple <- parse(db, parseDescriptor, catId)
} yield {
- val (name, desc, system) = triple
+ val TechniqueCategoryMetadata(name, desc, system) = triple
catId match {
case RootTechniqueCategoryId => RootTechniqueCategory(name, desc, isSystem = system)
case sId: SubTechniqueCategoryId => SubTechniqueCategory(sId, name, desc, isSystem = system)
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/git/ZipUtils.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/git/ZipUtils.scala
index 3050ec24f6a..58c9648af7c 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/git/ZipUtils.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/git/ZipUtils.scala
@@ -136,7 +136,11 @@ object ZipUtils {
def zip(zipout: OutputStream, toAdds: Seq[Zippable]): IOResult[Unit] = {
// we must ensure that each entry is unique, else zip fails
val unique = toAdds.distinctBy(_.path)
- ZIO.acquireReleaseWith(IOResult.attempt(new ZipOutputStream(zipout)))(zout => effectUioUnit(zout.close())) { zout =>
+ ZIO.acquireReleaseWith(IOResult.attempt(new ZipOutputStream(zipout)))(zout => {
+ // if the connection is interrupted, for ex if you use curl without a --output arg,
+ // then the usual effectUioUnit(zout.close()) leads to a big stack trace (unactionable, uninteresting).
+ ZIO.attemptBlocking(zout.close()).orElseSucceed(ZIO.unit)
+ }) { zout =>
val addToZout = (is: InputStream) => IOResult.attempt("Error when copying file")(IOUtils.copy(is, zout))
ZIO.foreachDiscard(unique) { x =>
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/ItemArchiveManager.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/ItemArchiveManager.scala
index acc11e673c9..86548005c89 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/ItemArchiveManager.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/ItemArchiveManager.scala
@@ -308,6 +308,8 @@ trait GitActiveTechniqueCategoryArchiver {
* managed by git.
* If gitCommit is true, the modification is
* saved in git. Else, no modification in git are saved.
+ *
+ * Only the metadata part (id, description...) is saved; items and children are ignored.
*/
def archiveActiveTechniqueCategory(
uptc: ActiveTechniqueCategory,
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitArchivers.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitArchivers.scala
index 0c46db6f820..88791ce07ea 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitArchivers.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitArchivers.scala
@@ -40,6 +40,7 @@ package com.normation.rudder.repository.xml
import com.normation.NamedZioLogger
import com.normation.cfclerk.domain.SectionSpec
import com.normation.cfclerk.domain.Technique
+import com.normation.cfclerk.domain.TechniqueCategoryMetadata
import com.normation.cfclerk.domain.TechniqueId
import com.normation.cfclerk.domain.TechniqueName
import com.normation.cfclerk.services.TechniqueRepository
@@ -52,6 +53,7 @@ import com.normation.rudder.domain.Constants.CONFIGURATION_RULES_ARCHIVE_TAG
import com.normation.rudder.domain.Constants.GROUPS_ARCHIVE_TAG
import com.normation.rudder.domain.Constants.PARAMETERS_ARCHIVE_TAG
import com.normation.rudder.domain.Constants.POLICY_LIBRARY_ARCHIVE_TAG
+import com.normation.rudder.domain.logger.GitArchiveLoggerPure
import com.normation.rudder.domain.nodes.NodeGroup
import com.normation.rudder.domain.nodes.NodeGroupCategory
import com.normation.rudder.domain.nodes.NodeGroupCategoryId
@@ -70,6 +72,7 @@ import com.normation.rudder.repository.*
import com.normation.rudder.services.marshalling.*
import com.normation.rudder.services.user.PersonIdentService
import java.io.File
+import java.io.FileNotFoundException
import net.liftweb.common.*
import org.apache.commons.io.FileUtils
import org.eclipse.jgit.lib.PersonIdent
@@ -238,6 +241,7 @@ trait TechniqueArchiver {
committer: EventActor,
msg: String
): IOResult[Unit]
+
def saveTechnique(
techniqueId: TechniqueId,
categories: Seq[String],
@@ -246,6 +250,14 @@ trait TechniqueArchiver {
committer: EventActor,
msg: String
): IOResult[Unit]
+
+ def saveTechniqueCategory(
+ categories: Seq[String], // path (inclusive) to the category
+ metadata: TechniqueCategoryMetadata,
+ modId: ModificationId,
+ committer: EventActor,
+ msg: String
+ ): IOResult[Unit]
}
/*
@@ -414,6 +426,41 @@ class TechniqueArchiverImpl(
} yield ()).chainError(s"error when committing Technique '${techniqueId.serialize}'").unit
}
+ def saveTechniqueCategory(
+ categories: Seq[String], // path (inclusive) to the category
+ metadata: TechniqueCategoryMetadata,
+ modId: ModificationId,
+ committer: EventActor,
+ msg: String
+ ): IOResult[Unit] = {
+ val categoryPath = categories.filter(_ != "/").mkString("/")
+ val catGitPath = s"${relativePath}/${categoryPath}/${TechniqueCategoryMetadata.FILE_NAME_XML}"
+ val categoryFile = gitRepo.rootDirectory / catGitPath
+ val xml = metadata.toXml
+
+ categories.lastOption match {
+ case None => Unexpected("You can't change the root category information").fail
+ case Some(catId) =>
+ (for {
+ // the file may not exist, which is not an error in that case
+ existing <- IOResult.attempt {
+ val elem = XML.load(Source.fromFile(categoryFile.toJava))
+ Some(TechniqueCategoryMetadata.parseXML(elem, catId))
+ }.catchSome { case SystemError(_, _: FileNotFoundException) => None.succeed }
+ _ <- if (existing.contains(metadata)) {
+ GitArchiveLoggerPure.debug(s"Not committing '${catGitPath}' because it already exists with these values")
+ } else {
+ for {
+ ident <- personIdentservice.getPersonIdentOrDefault(committer.name)
+ parent = categoryFile.parent
+ _ <- writeXml(categoryFile.toJava, xml, s"Archived technique category: ${catGitPath}")
+ _ <- IOResult.attempt(gitRepo.git.add.addFilepattern(catGitPath).call())
+ _ <- IOResult.attempt(gitRepo.git.commit.setCommitter(ident).setMessage(msg).call())
+ } yield ()
+ }
+ } yield ()).chainError(s"error when committing technique category '${catGitPath}'").unit
+ }
+ }
}
///////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseRudderObjects.scala b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseRudderObjects.scala
index 1881949cea2..2de3370468b 100644
--- a/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseRudderObjects.scala
+++ b/webapp/sources/rudder/rudder-core/src/main/scala/com/normation/rudder/repository/xml/GitParseRudderObjects.scala
@@ -42,6 +42,7 @@ import com.normation.GitVersion.Revision
import com.normation.GitVersion.RevisionInfo
import com.normation.box.IOManaged
import com.normation.cfclerk.domain.Technique
+import com.normation.cfclerk.domain.TechniqueCategoryMetadata
import com.normation.cfclerk.domain.TechniqueCategoryName
import com.normation.cfclerk.domain.TechniqueId
import com.normation.cfclerk.domain.TechniqueName
@@ -429,15 +430,24 @@ trait TechniqueRevisionRepository {
* Directories are added at the beginning
*/
def getTechniqueFileContents(id: TechniqueId): IOResult[Option[Seq[(String, Option[IOManaged[InputStream]])]]]
+
+ /*
+ * Always use git, does not look at what is on the FS even when revision is default.
+ * Retrieve the category object from the category.xml files under given path.
+ * Path is relative to technique directory root, so that for ex,
+ * `systemSettings/remoteAccess` will look for
+ * `/var/rudder/configuration-repository/techniques/systemSettings/remoteAccess/category.xml`
+ */
+ def getTechniqueCategoryMetadata(path: String, rev: Revision): IOResult[Option[TechniqueCategoryMetadata]]
}
class GitParseTechniqueLibrary(
- techniqueParser: TechniqueParser,
- val repo: GitRepositoryProvider,
- revisionProvider: GitRevisionProvider,
- libRootDirectory: String, // relative name to git root file
-
- techniqueMetadata: String
+ techniqueParser: TechniqueParser,
+ val repo: GitRepositoryProvider,
+ revisionProvider: GitRevisionProvider,
+ libRootDirectory: String, // relative name to git root file
+ techniqueMetadata: String,
+ techniqueCategoryFilename: String = "category.xml"
) extends TechniqueRevisionRepository {
/**
@@ -490,6 +500,52 @@ class GitParseTechniqueLibrary(
)
}
+ override def getTechniqueCategoryMetadata(catPath: String, rev: Revision): IOResult[Option[TechniqueCategoryMetadata]] = {
+ val root = GitRootCategory.getGitDirectoryPath(libRootDirectory).root
+ val filePath = catPath + "/" + techniqueCategoryFilename
+ (for {
+ _ <- ConfigurationLoggerPure.revision.debug(s"Looking for technique category: ${filePath}")
+ treeId <- GitFindUtils.findRevTreeFromRevision(repo.db, rev, revisionProvider.currentRevTreeId)
+ _ <- ConfigurationLoggerPure.revision.trace(s"Git tree corresponding to revision: ${rev.value}: ${treeId.toString}")
+ paths <- GitFindUtils.listFiles(repo.db, treeId, List(root), List(filePath))
+ _ <- ConfigurationLoggerPure.revision.trace(s"Found candidate paths: ${paths}")
+ data <- paths.size match {
+ case 0 =>
+ ConfigurationLoggerPure.revision.debug(s"Technique category ${filePath} not found") *>
+ None.succeed
+ case 1 =>
+ val gitPath = paths.head
+ val catId = catPath.split("/").last
+ ConfigurationLoggerPure.revision.trace(
 s"Technique category ${filePath} found at path '${gitPath}', loading it"
+ ) *>
+ (for {
+ xml <- GitFindUtils.getFileContent(repo.db, treeId, gitPath) { inputStream =>
+ ParseXml(inputStream, Some(gitPath)).chainError(s"Error when parsing file '${gitPath}' as XML")
+ }
+ } yield {
+ Some(TechniqueCategoryMetadata.parseXML(xml, catId))
+ }).tapError(err => {
+ ConfigurationLoggerPure.revision.debug(
+ s"Impossible to find technique category with path/revision: '${filePath}/${rev.value}': ${err.fullMsg}."
+ )
+ })
+ case _ =>
+ Unexpected(
+ s"There is more than one technique category with path '${filePath}' in git: ${paths.mkString(",")}"
+ ).fail
+ }
+ } yield {
+ data
+ }).tapBoth(
+ err => ConfigurationLoggerPure.error(err.fullMsg),
+ {
+ case None => ConfigurationLoggerPure.revision.debug(s" -> not found")
+ case Some(_) => ConfigurationLoggerPure.revision.debug(s" -> found it!")
+ }
+ )
+ }
+
override def getTechniqueRevision(name: TechniqueName, version: Version): IOResult[List[RevisionInfo]] = {
val root = GitRootCategory.getGitDirectoryPath(libRootDirectory).root
for {
@@ -518,6 +574,7 @@ class GitParseTechniqueLibrary(
} yield {
revs.toList
}
+
}
/*
diff --git a/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/cfclerk/services/JGitRepositoryTest.scala b/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/cfclerk/services/JGitRepositoryTest.scala
index a0505f9d6ef..88b96b9232e 100644
--- a/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/cfclerk/services/JGitRepositoryTest.scala
+++ b/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/cfclerk/services/JGitRepositoryTest.scala
@@ -38,19 +38,31 @@
package com.normation.cfclerk.services
import better.files.File
+import com.normation.cfclerk.domain.TechniqueCategoryMetadata
+import com.normation.cfclerk.services.impl.SystemVariableSpecServiceImpl
+import com.normation.cfclerk.xmlparsers.SectionSpecParser
+import com.normation.cfclerk.xmlparsers.TechniqueParser
+import com.normation.cfclerk.xmlparsers.VariableSpecParser
import com.normation.errors
import com.normation.errors.Inconsistency
import com.normation.errors.IOResult
import com.normation.errors.effectUioUnit
+import com.normation.eventlog.EventActor
import com.normation.eventlog.ModificationId
import com.normation.rudder.db.DB
import com.normation.rudder.git.GitCommitId
import com.normation.rudder.git.GitConfigItemRepository
import com.normation.rudder.git.GitRepositoryProvider
import com.normation.rudder.git.GitRepositoryProviderImpl
+import com.normation.rudder.ncf.EditorTechnique
+import com.normation.rudder.ncf.TechniqueCompilationOutput
+import com.normation.rudder.ncf.TechniqueCompiler
+import com.normation.rudder.ncf.TechniqueCompilerApp
import com.normation.rudder.repository.GitModificationRepository
import com.normation.rudder.repository.xml.RudderPrettyPrinter
+import com.normation.rudder.repository.xml.TechniqueArchiverImpl
import com.normation.rudder.repository.xml.XmlArchiverUtils
+import com.normation.rudder.services.user.TrivialPersonIdentService
import com.normation.zio.*
import net.liftweb.common.Loggable
import org.apache.commons.io.FileUtils
@@ -64,7 +76,7 @@ import org.specs2.runner.JUnitRunner
import org.specs2.specification.AfterAll
import scala.annotation.nowarn
import scala.util.Random
-import zio.*
+import zio.{System as _, *}
import zio.syntax.*
/**
@@ -83,7 +95,7 @@ class JGitRepositoryTest extends Specification with Loggable with AfterAll {
sequential
/**
- * Add a switch to be able to see tmp files (not clean themps) with
+ * Add a switch to be able to see tmp files (not clean temps) with
* -Dtests.clean.tmp=false
*/
override def afterAll(): Unit = {
@@ -95,21 +107,44 @@ class JGitRepositoryTest extends Specification with Loggable with AfterAll {
gitRoot.createDirectories()
- val repo: GitRepositoryProviderImpl = GitRepositoryProviderImpl.make(gitRoot.pathAsString).runNow
+ val repo: GitRepositoryProviderImpl = GitRepositoryProviderImpl.make(gitRoot.pathAsString).runNow
+ val prettyPrinter: RudderPrettyPrinter = new RudderPrettyPrinter(Int.MaxValue, 2)
+ val modRepo: GitModificationRepository = new GitModificationRepository {
+ override def getCommits(modificationId: ModificationId): IOResult[Option[GitCommitId]] = None.succeed
+ override def addCommit(commit: GitCommitId, modId: ModificationId): IOResult[DB.GitCommitJoin] =
+ DB.GitCommitJoin(commit, modId).succeed
+ }
+ val personIdent: TrivialPersonIdentService = new TrivialPersonIdentService()
+ val techniqueParser: TechniqueParser = {
+ val varParser = new VariableSpecParser
+ new TechniqueParser(varParser, new SectionSpecParser(varParser), new SystemVariableSpecServiceImpl())
+ }
+ val techniqueCompiler = new TechniqueCompiler {
+ override def compileTechnique(technique: EditorTechnique): IOResult[TechniqueCompilationOutput] = {
+ TechniqueCompilationOutput(TechniqueCompilerApp.Rudderc, fallbacked = false, 0, Chunk.empty, "", "", "").succeed
+ }
+
+ override def getCompilationOutputFile(technique: EditorTechnique): File = File("compilation-config.yml")
+
+ override def getCompilationConfigFile(technique: EditorTechnique): File = File("compilation-output.yml")
+ }
+
+ // for test, we use as a group owner whatever git root directory has
+ val currentUserName: String = repo.rootDirectory.groupName
+
val archive: GitConfigItemRepository with XmlArchiverUtils = new GitConfigItemRepository with XmlArchiverUtils {
override val gitRepo: GitRepositoryProvider = repo
override def relativePath: String = ""
- override def xmlPrettyPrinter = new RudderPrettyPrinter(Int.MaxValue, 2)
+ override def xmlPrettyPrinter = prettyPrinter
override def encoding: String = "UTF-8"
- override def gitModificationRepository: GitModificationRepository = new GitModificationRepository {
- override def getCommits(modificationId: ModificationId): IOResult[Option[GitCommitId]] = None.succeed
- override def addCommit(commit: GitCommitId, modId: ModificationId): IOResult[DB.GitCommitJoin] =
- DB.GitCommitJoin(commit, modId).succeed
- }
+ override def gitModificationRepository: GitModificationRepository = modRepo
- override def groupOwner: String = ""
+ override def groupOwner: String = currentUserName
}
+ val techniqueArchive: TechniqueArchiverImpl =
+ new TechniqueArchiverImpl(repo, prettyPrinter, modRepo, personIdent, techniqueParser, techniqueCompiler, currentUserName)
+
// listing files at a commit is complicated
import org.eclipse.jgit.treewalk.TreeWalk
@@ -140,9 +175,9 @@ class JGitRepositoryTest extends Specification with Loggable with AfterAll {
"The test lib" should {
"not throw JGitInternalError on concurrent write" in {
- // to assess the usefulness of semaphor, you can remove `gitRepo.semaphore.withPermit`
+ // to assess the usefulness of semaphore, you can remove `gitRepo.semaphore.withPermit`
// in `commitAddFile` to check that you get the JGitInternalException.
- // More advanced tests may be needed to handle more complex cases of concurent access,
+ // More advanced tests may be needed to handle more complex cases of concurrent access,
// see: https://issues.rudder.io/issues/19910
val actor = new PersonIdent("test", "test@test.com")
@@ -167,6 +202,53 @@ class JGitRepositoryTest extends Specification with Loggable with AfterAll {
created must containTheSameElementsAs(files)
}
+
+ "save a category" should {
+
+ val category = TechniqueCategoryMetadata("My new category", "A new category", isSystem = false)
+ val catPath = List("systemSettings", "myNewCategory")
+
+ val modId = new ModificationId("add-technique-cat")
+
+ "create a new file and commit if the category does not exist" in {
+
+ techniqueArchive
+ .saveTechniqueCategory(
+ catPath,
+ category,
+ modId,
+ EventActor("test"),
+ s"test: commit add category ${catPath.mkString("/")}"
+ )
+ .runNow
+
+ val catFile = repo.rootDirectory / "techniques" / "systemSettings" / "myNewCategory" / "category.xml"
+
+ val xml = catFile.contentAsString
+
+ val lastCommitMsg = repo.git.log().setMaxCount(1).call().iterator().next().getFullMessage
+
+ // note: no false ; it's only written when true
+ (xml ===
+ """
+ | My new category
+ | A new category
+ |""".stripMargin) and (
+ lastCommitMsg === "test: commit add category systemSettings/myNewCategory"
+ )
+
+ }
+
+ "does nothing when the category already exists" in {
+ techniqueArchive.saveTechniqueCategory(catPath, category, modId, EventActor("test"), s"test: commit again").runNow
+ val lastCommitMsg = repo.git.log().setMaxCount(1).call().iterator().next().getFullMessage
+
+ // last commit must be the old one
+ lastCommitMsg === "test: commit add category systemSettings/myNewCategory"
+
+ }
+ }
+
}
}
diff --git a/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/rudder/ncf/TestEditorTechniqueWriter.scala b/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/rudder/ncf/TestEditorTechniqueWriter.scala
index df4678a77bd..8b68f47a918 100644
--- a/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/rudder/ncf/TestEditorTechniqueWriter.scala
+++ b/webapp/sources/rudder/rudder-core/src/test/scala/com/normation/rudder/ncf/TestEditorTechniqueWriter.scala
@@ -43,6 +43,7 @@ import com.normation.cfclerk.domain.ReportingLogic
import com.normation.cfclerk.domain.RootTechniqueCategory
import com.normation.cfclerk.domain.TechniqueCategory
import com.normation.cfclerk.domain.TechniqueCategoryId
+import com.normation.cfclerk.domain.TechniqueCategoryMetadata
import com.normation.cfclerk.domain.TechniqueId
import com.normation.cfclerk.domain.TechniqueName
import com.normation.cfclerk.domain.TechniqueResourceId
@@ -139,6 +140,14 @@ class TestEditorTechniqueWriter extends Specification with ContentMatchers with
committer: EventActor,
msg: String
): IOResult[Unit] = ZIO.unit
+
+ override def saveTechniqueCategory(
+ categories: Seq[String],
+ metadata: TechniqueCategoryMetadata,
+ modId: ModificationId,
+ committer: EventActor,
+ msg: String
+ ): IOResult[Unit] = ZIO.unit
}
object TestLibUpdater extends UpdateTechniqueLibrary {
diff --git a/webapp/sources/rudder/rudder-rest/src/main/scala/com/normation/rudder/rest/lift/ArchiveApi.scala b/webapp/sources/rudder/rudder-rest/src/main/scala/com/normation/rudder/rest/lift/ArchiveApi.scala
index b41c3e00248..3ffc45a1fcd 100644
--- a/webapp/sources/rudder/rudder-rest/src/main/scala/com/normation/rudder/rest/lift/ArchiveApi.scala
+++ b/webapp/sources/rudder/rudder-rest/src/main/scala/com/normation/rudder/rest/lift/ArchiveApi.scala
@@ -40,6 +40,7 @@ package com.normation.rudder.rest.lift
import better.files.File
import cats.data.NonEmptyList
import com.normation.cfclerk.domain.Technique
+import com.normation.cfclerk.domain.TechniqueCategoryMetadata
import com.normation.cfclerk.domain.TechniqueCategoryName
import com.normation.cfclerk.domain.TechniqueId
import com.normation.cfclerk.domain.TechniqueName
@@ -97,10 +98,12 @@ import com.normation.rudder.rest.ApiModuleProvider
import com.normation.rudder.rest.ApiPath
import com.normation.rudder.rest.ArchiveApi as API
import com.normation.rudder.rest.AuthzToken
+import com.normation.rudder.rest.EndpointSchema0
import com.normation.rudder.rest.RudderJsonResponse
import com.normation.rudder.rest.RudderJsonResponse.ResponseSchema
import com.normation.rudder.rest.implicits.*
import com.normation.rudder.rest.lift.ImportAnswer.*
+import com.normation.rudder.rule.category.RuleCategoryId
import com.normation.rudder.services.queries.CmdbQueryParser
import com.normation.utils.StringUuidGenerator
import com.normation.zio.*
@@ -130,7 +133,7 @@ import zio.syntax.*
*/
final case class FeatureSwitch0[A <: LiftApiModule0](enable: A, disable: A)(featureSwitchState: IOResult[FeatureSwitch])
extends LiftApiModule0 {
- override val schema = enable.schema
+ override val schema: EndpointSchema0 = enable.schema
override def process0(
version: ApiVersion,
path: ApiPath,
@@ -489,6 +492,56 @@ class ZipArchiveBuilderService(
* Retrieve the group using first cache, then the config service, and update cache accordingly
*/
+ /*
+ * Get zips for technique categories.
+ * - we need to avoid duplicates so we don't zip the same things twice.
+ * - we may have two categories with different versions. Versions aren't ordered, so we can't take
+ * the newest. Since that case should be super rare and generally without impact (technique category description
+ * are not often changed), just take one at random
+ * - we assume that paths are whole paths (just missing the leading "/")
+ */
+ def getTechniqueCategoryZippable(
+ techniquesDir: String,
+ cats: Seq[(Chunk[TechniqueCategoryName], TechniqueVersion)]
+ ): IOResult[Seq[Zippable]] = {
+ for {
+ ref <- Ref.Synchronized.make(Map[String, Zippable]())
+ _ <- ZIO.foreachDiscard(cats.distinctBy(_._1)) {
+ case (cs, v) =>
+ // build the list of path. getTechniqueCategoryMetadata will add the leading part of the
+ // path and category.xml at the end, so remove leading "/" and skip last part.
+ val (paths, _) = cs.foldLeft((List.empty[String], "")) {
+ case (x, catName) if (catName.value == "/" || catName.value == TechniqueCategoryMetadata.FILE_NAME_XML) =>
+ x
+ case ((acc, parent), catName) =>
+ val newParent = parent + "/" + catName.value
+
+ (newParent :: acc, newParent)
+ }
+ ZIO.foreachDiscard(paths) { p =>
+ ref.updateZIO { m =>
+ // at each level, we need to add the directory and the file
+ val dirPath = techniquesDir + "/" + p
+ val filePathJson = dirPath + "/" + TechniqueCategoryMetadata.FILE_NAME_JSON
+ if (m.contains(filePathJson)) m.succeed // if category.json is here, by construction its parent dir is, too
+ else {
+ techniqueRevisionRepo.getTechniqueCategoryMetadata(p, v.rev).map {
+ case None => m
+ case Some(data) =>
+ m + (dirPath -> Zippable(dirPath, None))
+ + (filePathJson -> Zippable(filePathJson, Some(getJsonZippableContent(data.toJsonPretty))))
+ }
+ }
+ }
+ }
+ }
+ infos <- ref.get
+ } yield {
+ val sorted = infos.toList.sortBy(_._1)
+ sorted.map(_._2)
+ }
+ }
+
/*
* Getting technique zippable is more complex than other items because we can have a lot of
* files. The strategy used is to always copy ALL files for the given technique
@@ -522,16 +575,7 @@ class ZipArchiveBuilderService(
catDirs = cats.collect { case TechniqueCategoryName(value) if value != "/" => value }
basePath = techniquesDir + "/" + catDirs.mkString("/") + "/" + techniqueId.withDefaultRev.serialize + "/"
// start by adding directories toward technique
- zips = catDirs
- .foldLeft(List[Zippable]()) {
- case (dirs, current) =>
- // each time, head is the last parent, revert at the end
- dirs.headOption match {
- case None => Zippable(techniquesDir + "/" + current, None) :: Nil
- case Some(parent) => Zippable(parent.path + "/" + current, None) :: dirs
- }
- }
- .reverse ++ filtered.map { case (p, opt) => Zippable.make(basePath + p, opt) }
+ zips = filtered.map { case (p, opt) => Zippable.make(basePath + p, opt) }
_ <- ApplicationLoggerPure.Archive.debug(
s"Building archive '${archiveName}': adding technique zippables: ${zips.map(_.path).mkString(", ")}"
)
@@ -540,6 +584,11 @@ class ZipArchiveBuilderService(
}
}
+ def getRuleCatZippable(ids: Set[RuleCategoryId]): IOResult[Seq[Zippable]] = {
+ // todo : https://issues.rudder.io/issues/25061
+ Seq().succeed
+ }
+
/*
* Get the list of all group category or group as Zip object, in the order they need to be created
* (ie first categories from root to leaves, then groups)
@@ -688,12 +737,13 @@ class ZipArchiveBuilderService(
rulesDir = root + "/" + RULES_DIR
_ <- usedNames.update(_ + ((RULES_DIR, Set.empty[String])))
rulesDirZip = Zippable(rulesDir, None)
+ ruleCatsRef <- Ref.make(Set[RuleCategoryId]())
rulesZip <- ZIO
.foreach(ruleIds) { ruleId =>
configRepo
.getRule(ruleId)
.notOptional(s"Rule with id ${ruleId.serialize} was not found in Rudder")
- .flatMap(rule => {
+ .flatMap { rule =>
if (rule.isSystem) None.succeed
else {
for {
@@ -704,13 +754,16 @@ class ZipArchiveBuilderService(
path = rulesDir + "/" + name
_ <- ApplicationLoggerPure.Archive
.debug(s"Building archive '${rootDirName}': adding rule zippable: ${path}")
+ _ <- ruleCatsRef.update(_ + rule.categoryId)
} yield {
Some(Zippable(path, Some(getJsonZippableContent(json))))
}
}
- })
+ }
}
.map(_.flatten)
+ ruleCats <- ruleCatsRef.get
+ ruleCatsZip <- getRuleCatZippable(ruleCats)
groupsDir = root + "/" + GROUPS_DIR
_ <- usedNames.update(_ + ((GROUPS_DIR, Set.empty[String])))
groupsDirZip = Zippable(groupsDir, None)
@@ -753,7 +806,10 @@ class ZipArchiveBuilderService(
techniquesDirZip = Zippable(techniquesDir, None)
depTechniques <- if (includeDepTechniques) techniques.get.map(_.keys) else Nil.succeed
allTech <- ZIO.foreach(techniqueIds ++ depTechniques)(techniqueId => getTechnique(techniqueId, techniques))
- techniquesZip <- ZIO.foreach(allTech.filter(_._2.policyTypes.isBase)) {
+ // start by zipping categories after having dedup them
+ techCats = allTech.collect { case (c, t) => (c, t.id.version) }
+ techCatsZip <- getTechniqueCategoryZippable(techniquesDir, techCats)
+ techniquesZip <- ZIO.foreach(allTech.filter(_._2.isBase)) {
case (cats, technique) =>
for {
techZips <- getTechniqueZippable(rootDirName, techniquesDir, cats, technique.id)
@@ -770,7 +826,7 @@ class ZipArchiveBuilderService(
groupsDirZip,
directivesDirZip,
techniquesDirZip
- ) ++ rulesZip ++ techniquesZip.flatten ++ directivesZip ++ groupsZip
+ ) ++ rulesZip ++ techCatsZip ++ techniquesZip.flatten ++ directivesZip ++ groupsZip
}
}
@@ -785,6 +841,13 @@ case object PolicyArchiveMetadata {
def empty: PolicyArchiveMetadata = PolicyArchiveMetadata("")
}
+final case class TechniqueCategoryArchive(
+ metadata: TechniqueCategoryMetadata,
+ // the path; the last element is the category holding the metadata (i.e. it is also the category id)
+ // Can't be empty because we don't change root category with archive.
+ category: NonEmptyChunk[String]
+)
+
final case class TechniqueInfo(id: TechniqueId, name: String, kind: TechniqueType)
final case class TechniqueArchive(
@@ -811,36 +874,41 @@ final case class GroupArchive(
* For techniques, we only parse metadata.xml, and we keep files as is
*/
final case class PolicyArchive(
- metadata: PolicyArchiveMetadata,
- techniques: Chunk[TechniqueArchive],
- directives: Chunk[DirectiveArchive],
- groupCats: Chunk[GroupCategoryArchive],
- groups: Chunk[GroupArchive],
- rules: Chunk[Rule]
+ metadata: PolicyArchiveMetadata,
+ techniqueCats: Chunk[TechniqueCategoryArchive],
+ techniques: Chunk[TechniqueArchive],
+ directives: Chunk[DirectiveArchive],
+ groupCats: Chunk[GroupCategoryArchive],
+ groups: Chunk[GroupArchive],
+ rules: Chunk[Rule]
) {
+ // format: off
def debugString: String = {
s"""Archive ${metadata.filename}:
- | - techniques : ${techniques.map(_.technique.id.serialize).sorted.mkString(", ")}
- | - directives : ${directives.map(d => s"'${d.directive.name}' [${d.directive.id.serialize}]").sorted.mkString(", ")}
- | - group categories: ${groupCats.map(c => s"'${c.category.name}' [${c.category.id}]").sorted.mkString(", ")}
- | - groups : ${groups.map(g => s"'${g.group.name}' [${g.group.id.serialize}]").sorted.mkString(", ")}
- | - rules : ${rules.map(r => s"'${r.name}' [${r.id.serialize}]").sorted.mkString(", ")}""".stripMargin
+ | - technique categories: ${techniqueCats.map(c => s"${c.category.mkString("/", "/", "/")}${metadata.filename}").sorted.mkString(", ")}
+ | - techniques : ${techniques.map(_.technique.id.serialize).sorted.mkString(", ")}
+ | - directives : ${directives.map(d => s"'${d.directive.name}' [${d.directive.id.serialize}]").sorted.mkString(", ")}
+ | - group categories : ${groupCats.map(c => s"'${c.category.name}' [${c.category.id}]").sorted.mkString(", ")}
+ | - groups : ${groups.map(g => s"'${g.group.name}' [${g.group.id.serialize}]").sorted.mkString(", ")}
+ | - rules : ${rules.map(r => s"'${r.name}' [${r.id.serialize}]").sorted.mkString(", ")}""".stripMargin
}
+ // format: on
}
object PolicyArchive {
def empty: PolicyArchive =
- PolicyArchive(PolicyArchiveMetadata.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty)
+ PolicyArchive(PolicyArchiveMetadata.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty)
}
final case class SortedEntries(
- techniques: Chunk[(String, Array[Byte])],
- directives: Chunk[(String, Array[Byte])],
- groupCats: Chunk[(String, Array[Byte])],
- groups: Chunk[(String, Array[Byte])],
- rules: Chunk[(String, Array[Byte])]
+ techniquesCats: Chunk[(String, Array[Byte])],
+ techniques: Chunk[(String, Array[Byte])],
+ directives: Chunk[(String, Array[Byte])],
+ groupCats: Chunk[(String, Array[Byte])],
+ groups: Chunk[(String, Array[Byte])],
+ rules: Chunk[(String, Array[Byte])]
)
object SortedEntries {
- def empty: SortedEntries = SortedEntries(Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty)
+ def empty: SortedEntries = SortedEntries(Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty, Chunk.empty)
}
final case class PolicyArchiveUnzip(
@@ -878,20 +946,40 @@ class ZipArchiveReaderImpl(
import com.softwaremill.quicklens.*
// we must avoid to eagerly match "ncf_techniques" as "techniques" but still accept when it starts by "techniques" without /
- val techniqueRegex: Regex = """(.*/|)techniques/(.+)""".r
- val yamlRegex: Regex = s"""(.+)/${TechniqueType.Yaml.name}""".r
- val jsonRegex: Regex = s"""(.+)/${TechniqueType.Json.name}""".r
- val metadataRegex: Regex = s"""(.+)/${TechniqueType.Metadata.name}""".r
- val directiveRegex: Regex = """(.*/|)directives/(.+.json)""".r
- val groupCatsRegex: Regex = """(.*/|)groups/(.*category.json)""".r
- val groupRegex: Regex = """(.*/|)groups/(.+.json)""".r
- val ruleRegex: Regex = """(.*/|)rules/(.+.json)""".r
+ val techniqueCatsRegex: Regex = """(.*/|)techniques/(.+category.json)""".r
+ val techniqueRegex: Regex = """(.*/|)techniques/(.+)""".r
+ val yamlRegex: Regex = s"""(.+)/${TechniqueType.Yaml.name}""".r
+ val jsonRegex: Regex = s"""(.+)/${TechniqueType.Json.name}""".r
+ val metadataRegex: Regex = s"""(.+)/${TechniqueType.Metadata.name}""".r
+ val directiveRegex: Regex = """(.*/|)directives/(.+.json)""".r
+ val groupCatsRegex: Regex = """(.*/|)groups/(.*category.json)""".r
+ val groupRegex: Regex = """(.*/|)groups/(.+.json)""".r
+ val ruleRegex: Regex = """(.*/|)rules/(.+.json)""".r
/*
* For technique, we are parsing metadata.xml.
* We also find technique name, version, and categories from base path.
* For file: we keep all, but we make their path relative to technique (ie we remove base path)
*/
+ def parseTechniqueCat(name: String, content: Array[Byte])(implicit
+ dec: JsonDecoder[TechniqueCategoryMetadata]
+ ): IOResult[TechniqueCategoryArchive] = {
+ // we need to keep the path to be able to reconstruct the category hierarchy later on
+ val catPath = name.split("/").dropRight(1).toList
+
+ catPath match {
+ case Nil =>
+ Unexpected(
+ s"Category in archive must have at least one parent, but category relative path is: ${name}"
+ ).fail
+ case head :: tail =>
+ new String(content, StandardCharsets.UTF_8)
+ .fromJson[TechniqueCategoryMetadata]
+ .toIO
+ .map(c => TechniqueCategoryArchive(c, NonEmptyChunk.fromIterable(head, tail)))
+ }
+ }
+
def parseTechnique(archiveName: String, basepath: String, files: Chunk[(String, Array[Byte])]): IOResult[TechniqueArchive] = {
// base path should look like "some/list/of/cats/techniqueName/techniqueVersion
def parseBasePath(p: String): IOResult[(TechniqueId, Chunk[String])] = {
@@ -1026,9 +1114,14 @@ class ZipArchiveReaderImpl(
(new String(content, StandardCharsets.UTF_8)).fromJson[JRRule].toIO.flatMap(_.toRule())
}
+ def parseTechniqueCats(arch: PolicyArchiveUnzip, cats: Chunk[(String, Array[Byte])])(implicit
+ dec: JsonDecoder[TechniqueCategoryMetadata]
+ ): IOResult[PolicyArchiveUnzip] = {
+ parseSimpleFile(arch, cats, modifyLens[PolicyArchiveUnzip](_.policies.techniqueCats), parseTechniqueCat)
+ }
/*
* Parse techniques.
- * The map is [techniqueBasePath -> (metadata contant, list of all technique files, including metadata.xlm: (filename (including base path), content))
+ * The map is [techniqueBasePath -> (metadata content, list of all technique files, including metadata.xml: (filename (including base path), content))
*/
def parseTechniques(
archiveName: String,
@@ -1084,27 +1177,30 @@ class ZipArchiveReaderImpl(
val sortedEntries = zipEntries.foldLeft(SortedEntries.empty) {
case (arch, (e, optContent)) =>
(e.getName, optContent) match {
- case (techniqueRegex(_, x), Some(content)) =>
+ case (techniqueCatsRegex(_, x), Some(content)) =>
+ ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found technique category file ${x}")
+ arch.modify(_.techniquesCats).using(_ :+ (x, content))
+ case (techniqueRegex(_, x), Some(content)) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found technique file ${x}")
arch.modify(_.techniques).using(_ :+ (x, content))
- case (directiveRegex(_, x), Some(content)) =>
+ case (directiveRegex(_, x), Some(content)) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found directive file ${x}")
arch.modify(_.directives).using(_ :+ (x, content))
- case (groupCatsRegex(_, x), Some(content)) =>
+ case (groupCatsRegex(_, x), Some(content)) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found group category file ${x}")
arch.modify(_.groupCats).using(_ :+ (x, content))
- case (groupRegex(_, x), Some(content)) =>
+ case (groupRegex(_, x), Some(content)) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found group file ${x}")
arch.modify(_.groups).using(_ :+ (x, content))
- case (ruleRegex(_, x), Some(content)) =>
+ case (ruleRegex(_, x), Some(content)) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Archive '${archiveName}': found rule file ${x}")
arch.modify(_.rules).using(_ :+ (x, content))
- case (name, Some(_)) =>
+ case (name, Some(_)) =>
ApplicationLoggerPure.Archive.logEffect.debug(
s"Archive '${archiveName}': file does not matches a known category: ${name}"
)
arch
- case (name, None) =>
+ case (name, None) =>
ApplicationLoggerPure.Archive.logEffect.trace(s"Directory '${name}' in archive '${archiveName}': looking for entries")
arch
}
@@ -1138,32 +1234,38 @@ class ZipArchiveReaderImpl(
// now, parse everything and collect errors
import com.normation.rudder.apidata.JsonResponseObjectDecodes.*
+ import com.normation.cfclerk.domain.TechniqueCategoryMetadata.codecTechniqueCategoryMetadata
for {
- _ <- ApplicationLoggerPure.Archive.debug(
- s"Processing archive '${archiveName}': techniques: '${techniqueUnzips.keys.mkString("', '")}'"
- )
- withTechniques <- parseTechniques(archiveName, PolicyArchiveUnzip.empty, techniqueUnzips)
- _ <- ApplicationLoggerPure.Archive.debug(
- s"Processing archive '${archiveName}': directives: '${sortedEntries.directives.map(_._1).mkString("', '")}'"
- )
- withDirectives <- parseDirectives(withTechniques, sortedEntries.directives)
- _ <- ApplicationLoggerPure.Archive.debug(
- s"Processing archive '${archiveName}': groups: '${sortedEntries.groups.map(_._1).mkString("', '")}'"
- )
- withGroupCats <- parseGroupCats(withDirectives, sortedEntries.groupCats)
- _ <- ApplicationLoggerPure.Archive.debug(
- s"Processing archive '${archiveName}': rules: '${sortedEntries.rules.map(_._1).mkString("', '")}'"
- )
- withGroups <- parseGroups(withGroupCats, sortedEntries.groups)
- _ <- ApplicationLoggerPure.Archive.debug(
- s"Processing archive '${archiveName}': rules: '${sortedEntries.rules.map(_._1).mkString("', '")}'"
- )
- withRules <- parseRules(withGroups, sortedEntries.rules)
+ _ <- ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': techniques: '${techniqueUnzips.keys.mkString("', '")}'"
+ )
+ withTechniques <- parseTechniques(archiveName, PolicyArchiveUnzip.empty, techniqueUnzips)
+ _ <-
+ ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': technique categories: '${sortedEntries.techniquesCats.map(_._1).mkString("', '")}'"
+ )
+ withTechniqueCats <- parseTechniqueCats(withTechniques, sortedEntries.techniquesCats)
+ _ <- ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': directives: '${sortedEntries.directives.map(_._1).mkString("', '")}'"
+ )
+ withDirectives <- parseDirectives(withTechniqueCats, sortedEntries.directives)
+ _ <- ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': groups: '${sortedEntries.groups.map(_._1).mkString("', '")}'"
+ )
+ withGroupCats <- parseGroupCats(withDirectives, sortedEntries.groupCats)
+ _ <- ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': rules: '${sortedEntries.rules.map(_._1).mkString("', '")}'"
+ )
+ withGroups <- parseGroups(withGroupCats, sortedEntries.groups)
+ _ <- ApplicationLoggerPure.Archive.debug(
+ s"Processing archive '${archiveName}': rules: '${sortedEntries.rules.map(_._1).mkString("', '")}'"
+ )
+ withRules <- parseRules(withGroups, sortedEntries.rules)
// aggregate errors
- policies <- withRules.errors.toList match {
- case Nil => withRules.policies.succeed
- case h :: tail => Accumulated(NonEmptyList.of(h, tail*)).fail
- }
+ policies <- withRules.errors.toList match {
+ case Nil => withRules.policies.succeed
+ case h :: tail => Accumulated(NonEmptyList.of(h, tail*)).fail
+ }
} yield policies
}
}
@@ -1299,7 +1401,6 @@ object SaveArchiveServicebyRepo {
class SaveArchiveServicebyRepo(
techniqueArchiver: TechniqueArchiverImpl,
techniqueReader: TechniqueReader,
- techniqueRepos: TechniqueRepository,
roDirectiveRepos: RoDirectiveRepository,
woDirectiveRepos: WoDirectiveRepository,
roGroupRepos: RoNodeGroupRepository,
@@ -1313,6 +1414,23 @@ class SaveArchiveServicebyRepo(
val GroupRootId = NodeGroupCategoryId("GroupRoot")
+ def saveTechniqueCat(eventMetadata: EventMetadata, a: TechniqueCategoryArchive): IOResult[Unit] = {
+ val catPath = a.category.toList
+
+ ApplicationLoggerPure.Archive.debug(
+ s"Adding technique category from archive: '${a.metadata.name}' (${catPath.mkString("/")}/category.xml)"
+ ) *>
+ techniqueArchiver
+ .saveTechniqueCategory(
+ catPath,
+ a.metadata,
+ eventMetadata.modId,
+ eventMetadata.actor,
+ eventMetadata.msg.getOrElse(s"Update technique category '${catPath.mkString("/")}' from archive import")
+ )
+ .unit
+ }
+
/*
* Saving a techniques:
* - override all files that are coming from archive
@@ -1481,6 +1599,7 @@ class SaveArchiveServicebyRepo(
)
val eventMetadata = cc.transformInto[EventMetadata]
for {
+ _ <- ZIO.foreach(archive.techniqueCats)(saveTechniqueCat(eventMetadata, _))
_ <- ZIO.foreach(archive.techniques)(saveTechnique(eventMetadata, _))
_ <- IOResult.attempt(techniqueReader.readTechniques)
_ <- ZIO.foreach(archive.directives)(saveDirective(eventMetadata, _))
diff --git a/webapp/sources/rudder/rudder-rest/src/test/scala/com/normation/rudder/rest/ArchiveApiTest.scala b/webapp/sources/rudder/rudder-rest/src/test/scala/com/normation/rudder/rest/ArchiveApiTest.scala
index 879e8c6735b..c829c5e83e8 100644
--- a/webapp/sources/rudder/rudder-rest/src/test/scala/com/normation/rudder/rest/ArchiveApiTest.scala
+++ b/webapp/sources/rudder/rudder-rest/src/test/scala/com/normation/rudder/rest/ArchiveApiTest.scala
@@ -213,7 +213,16 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
// only system group => none exported
(children(testDir / s"${archiveName}/groups") must containTheSameElementsAs(Nil)) and
(children(testDir / s"${archiveName}/directives") must containTheSameElementsAs(List("10__Clock_Configuration.json"))) and
- (children(
+ // we have all levels of category metadata for technique parents *except* the root category, which is a system
+ // category and which we don't want to change with import.
+ (directChildren(testDir / s"${archiveName}/techniques") must containTheSameElementsAs(List("systemSettings"))) and
+ (directChildren(testDir / s"${archiveName}/techniques/systemSettings") must containTheSameElementsAs(
+ List("misc", "category.json")
+ )) and
+ (directChildren(testDir / s"${archiveName}/techniques/systemSettings/misc") must containTheSameElementsAs(
+ List("clockConfiguration", "category.json")
+ )) and
+ (directChildren(
testDir / s"${archiveName}/techniques/systemSettings/misc/clockConfiguration/3.0"
) must containTheSameElementsAs(List("changelog", "clockConfiguration.st", "metadata.xml")))
@@ -278,7 +287,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
case err => ko(s"I got an error in test: ${err}")
} and {
- val tech = restTestSetUp.mockTechniques.techniqueRepo
+ val tech = restTestSetUp.mockTechniques.techniqueRepo
.get(
TechniqueId(
TechniqueName("test_import_export_archive"),
@@ -286,6 +295,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
)
)
.getOrElse(throw new IllegalArgumentException("test"))
+
// during import, we are actually migrating to Yaml
val techInfo = TechniqueInfo(tech.id, tech.name, TechniqueType.Yaml)
@@ -295,6 +305,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
)
.runNow
.getOrElse(throw new IllegalArgumentException("test"))
+
restTestSetUp.archiveAPIModule.rootDirName.set(archiveName).runNow
restTest.testBinaryPOSTResponse(
s"/api/latest/archives/import",
@@ -540,7 +551,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
}
/*
- * Copy the content of a existing archive into an import directory, zip-it
+ * Copy the content of an existing archive into an import directory, zip-it
*/
val dest = testDir / "import-rule-with-dep"
// so that we have systemSettings/misc/clockConfiguration
@@ -553,7 +564,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
(testDir / "archive-group" / "groups" / "category_1" / "category.json").copyToDirectory(subCatDir)
(testDir / "archive-group" / "groups" / "category_1" / "Real_nodes.json").copyToDirectory(subCatDir)
- val tech = restTestSetUp.mockTechniques.techniqueRepo
+ val tech = restTestSetUp.mockTechniques.techniqueRepo
.get(
TechniqueId(
TechniqueName("clockConfiguration"),
@@ -562,13 +573,15 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
)
.getOrElse(throw new IllegalArgumentException("test"))
.copy(name = "Time settings updated")
+
val techInfo = TechniqueInfo(tech.id, tech.name, TechniqueType.Metadata)
- val dir1 = restTestSetUp.mockDirectives.directiveRepo
+ val dir1 = restTestSetUp.mockDirectives.directiveRepo
.getDirective(DirectiveUid("directive1"))
.notOptional(s"test")
.runNow
.copy(shortDescription = "a new description")
+
val group = {
val (group, _) = restTestSetUp.mockNodeGroups.groupsRepo
.getNodeGroup(NodeGroupId(NodeGroupUid("0000f5d3-8c61-4d20-88a7-bb947705ba8a")))
@@ -576,6 +589,7 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
// hidden properties are not copied in archive
group.copy(description = "a new description").copy(properties = group.properties.filter(_.visibility == Displayed))
}
+
val rule1 = restTestSetUp.mockRules.ruleRepo
.getOpt(RuleId(RuleUid("rule1")))
.notOptional(s"test")
@@ -583,6 +597,11 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
.copy(shortDescription = "a new description")
// change things
+ sed(
+ dest / "techniques" / "systemSettings" / "category.json",
+ """"name" : "System settings"""",
+ s""""name" : "System settings updated""""
+ )
sed(
dest / "techniques" / "systemSettings" / "misc" / "clockConfiguration" / "3.0" / "metadata.xml",
"""""",
@@ -618,7 +637,12 @@ class ArchiveApiTest extends Specification with AfterAll with Loggable {
restTestSetUp.archiveAPIModule.archiveSaver.base.get.runNow match {
case None => ko(s"No policies were saved")
case Some((p, m)) =>
- (p.techniques(0).technique must beEqualTo(techInfo)) and
+ // we have 3 technique categories: techniques/ncf_techniques (0), techniques/systemSettings/misc (1), and techniques/systemSettings (2)
+ // and two techniques: a_simple_yaml_technique (0) and clockConfiguration (1)
+ // and only one group (added by hand), directive and rule.
+
+ (p.techniqueCats.sortBy(_.metadata.name).apply(2).metadata.name must beEqualTo("System settings updated")) and
+ (p.techniques.sortBy(_.technique.id.name).apply(1).technique must beEqualTo(techInfo)) and
(p.directives(0).directive must beEqualTo(dir1)) and
(p.groups(0).group must beEqualTo(group))
(p.rules(0) must beEqualTo(rule1))
diff --git a/webapp/sources/rudder/rudder-web/src/main/scala/bootstrap/liftweb/RudderConfig.scala b/webapp/sources/rudder/rudder-web/src/main/scala/bootstrap/liftweb/RudderConfig.scala
index a5c329a1196..249bdf4d598 100644
--- a/webapp/sources/rudder/rudder-web/src/main/scala/bootstrap/liftweb/RudderConfig.scala
+++ b/webapp/sources/rudder/rudder-web/src/main/scala/bootstrap/liftweb/RudderConfig.scala
@@ -2011,7 +2011,6 @@ object RudderConfigInit {
new SaveArchiveServicebyRepo(
techniqueArchiver,
techniqueReader,
- techniqueRepository,
roDirectiveRepository,
woDirectiveRepository,
roNodeGroupRepository,