From 15a87f309a8042fb074eff9acb071437b724dc1b Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 12:37:26 +0300 Subject: [PATCH 1/9] Add sbt-scalafmt sbt plugin --- project/build.sbt | 1 + 1 file changed, 1 insertion(+) diff --git a/project/build.sbt b/project/build.sbt index 59c12ab..cd488e8 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -5,6 +5,7 @@ lazy val root = project addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.6"), addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.3"), addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12"), + addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2"), // // Let's add our sbt plugin to the sbt too ;) // unmanagedSourceDirectories in Compile ++= { // val pluginMainDir = baseDirectory.value.getParentFile / "sbt-plugin" / "src" / "main" From 335073c0e254e711469f71c8309952c3fe39c7a4 Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 12:42:23 +0300 Subject: [PATCH 2/9] Enhance the scalafmt settings --- .scalafmt.conf | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 2ca4804..1cffef8 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,6 +1,17 @@ +version = "3.7.17" +runner.dialect = Scala213 maxColumn = 100 -project.git=true -align = none -danglingParentheses = true -newlines.neverBeforeJsNative = true -newlines.sometimesBeforeColonInMethodReturnType = false +docstrings.style = Asterisk +docstrings.wrap = no +assumeStandardLibraryStripMargin = true +newlines.beforeCurlyLambdaParams = multilineWithCaseOnly +align.tokens=[] +align.openParenCallSite = false +align.openParenDefnSite = false +binPack.literalArgumentLists = true +project.git = true + +# exclude submodules +project.excludeFilters = [ + "vscode-scala/" +] From 8bafa14dab3d212442523410c1493bd1496c5cd5 Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 12:44:55 +0300 Subject: [PATCH 3/9] Format sbt's files --- build.sbt | 23 +++++++----- 
project/BuildPlugin.scala | 63 ++++++++++++++++++-------------- project/WorkingPluginCross.scala | 3 +- project/build.sbt | 2 +- 4 files changed, 52 insertions(+), 39 deletions(-) diff --git a/build.sbt b/build.sbt index e94aead..73693fb 100644 --- a/build.sbt +++ b/build.sbt @@ -38,7 +38,7 @@ lazy val fullCrossVersionSettings = Seq( // Unfortunately, it only includes directories like "scala_2.12" or "scala_2.13", // not "scala_2.12.18" or "scala_2.13.12" that we need. // That's why we have to work around here. - val base = (Compile/ sourceDirectory).value + val base = (Compile / sourceDirectory).value val versionDir = scalaVersion.value.replaceAll("-.*", "") base / ("scala-" + versionDir) } @@ -48,7 +48,7 @@ import _root_.ch.epfl.scala.profiling.build.BuildImplementation.BuildDefaults import scalapb.compiler.Version.scalapbVersion lazy val profiledb = project .in(file("profiledb")) - //.settings(metalsSettings) + // .settings(metalsSettings) .settings( // Specify scala version to allow third-party software to use this module crossScalaVersions := bin212 ++ bin213, @@ -61,7 +61,7 @@ lazy val profiledb = project // Do not change the lhs id of this plugin, `BuildPlugin` relies on it lazy val plugin = project .dependsOn(profiledb) - //.settings(metalsSettings) + // .settings(metalsSettings) .settings( fullCrossVersionSettings, name := "scalac-profiling", @@ -90,8 +90,8 @@ lazy val plugin = project // Enable debugging information when necessary val debuggingPluginOptions = if (!enableStatistics.value) Nil - else List("-Ystatistics") //, "-P:scalac-profiling:show-profiles") - //else List("-Xlog-implicits", "-Ystatistics:typer") + else List("-Ystatistics") // , "-P:scalac-profiling:show-profiles") + // else List("-Xlog-implicits", "-Ystatistics:typer") Seq(addPlugin, dummy) ++ debuggingPluginOptions }, Test / scalacOptions ++= optionsForSourceCompilerPlugin.value, @@ -149,13 +149,18 @@ lazy val plugin = project // .dependsOn(publishLocal) // .value // ) -// +// lazy val 
profilingSbtPlugin = project .in(file("sbt-plugin")) .settings( name := "sbt-scalac-profiling", scalaVersion := bin212.head, - scriptedLaunchOpts ++= Seq("-Xmx2048M", "-Xms1024M", "-Xss8M", s"-Dplugin.version=${version.value}"), + scriptedLaunchOpts ++= Seq( + "-Xmx2048M", + "-Xms1024M", + "-Xss8M", + s"-Dplugin.version=${version.value}" + ), scriptedBufferLog := false ) .enablePlugins(SbtPlugin) @@ -176,13 +181,13 @@ lazy val integrations = project .sequential( clean, (BetterFilesCore / Compile / clean), - (WartremoverCore / Compile / clean), + (WartremoverCore / Compile / clean) ) .value, test := Def .sequential( (ThisBuild / showScalaInstances), - (Compile / compile), + (Compile / compile) ) .value, testOnly := Def.inputTaskDyn { diff --git a/project/BuildPlugin.scala b/project/BuildPlugin.scala index c85feee..4a1584d 100644 --- a/project/BuildPlugin.scala +++ b/project/BuildPlugin.scala @@ -45,10 +45,14 @@ object BuildKeys { // Source dependencies from git are cached by sbt val BetterFiles = RootProject( - uri("https://git@github.com/pathikrit/better-files.git#6f2e3f1328b1b18eddce973510db71bc6c14fadb") // v3.9.2 + uri( + "https://git@github.com/pathikrit/better-files.git#6f2e3f1328b1b18eddce973510db71bc6c14fadb" + ) // v3.9.2 ) val Wartremover = RootProject( - uri("https://git@github.com/wartremover/wartremover.git#29bb7b69ad49eb87c19d9ba865298071c2795bb7") // v3.1.4 + uri( + "https://git@github.com/wartremover/wartremover.git#29bb7b69ad49eb87c19d9ba865298071c2795bb7" + ) // v3.1.4 ) val BetterFilesCore = ProjectRef(BetterFiles.build, "core") @@ -56,7 +60,7 @@ object BuildKeys { val IntegrationProjectsAndReferences = List[(ProjectRef, String)]( BetterFilesCore -> "BetterFilesCore", - WartremoverCore -> "WartremoverCore", + WartremoverCore -> "WartremoverCore" ) val AllIntegrationProjects = IntegrationProjectsAndReferences.map(_._1) @@ -65,8 +69,10 @@ object BuildKeys { // final val ScalacVersion = Keys.version in BuildKeys.ScalacCompiler // final val 
ScalacScalaVersion = Keys.scalaVersion in BuildKeys.ScalacCompiler - /** Write all the compile-time dependencies of the compiler plugin to a file, - * in order to read it from the created Toolbox to run the neg tests. */ + /** + * Write all the compile-time dependencies of the compiler plugin to a file, + * in order to read it from the created Toolbox to run the neg tests. + */ lazy val generateToolboxClasspath = Def.task { val scalaBinVersion = (Compile / Keys.scalaBinaryVersion).value val targetDir = (Compile / Keys.target).value @@ -82,25 +88,25 @@ object BuildKeys { } /** - * Sbt does not like overrides of setting values that happen in ThisBuild, - * nor in other project settings like integrations'. No. Sbt is exigent and - * always asks you to give your best. - * - * Why so much code for such a simple idea? Well, `Project.extract` does force - * the execution and initialization of settings, so as `onLoad` is a setting - * it causes a recursive call to itself, yay! - * - * So, in short, solution: use an attribute in the state to short-circuit the - * recursive invocation. - * - * Notes to the future reader: the bug that prompted this solution is weird - * I can indeed override lots of settings via project refs, but when it comes - * to overriding a setting **in a project** (that has been generated via - * sbt-cross-project), it does not work. On top of this, this wouldn't happen - * if monocle defined the scala versions at the build level (it instead does it - * at the project level, which is bad practice). So, finding a repro for this - * is going to be fun. - */ + * Sbt does not like overrides of setting values that happen in ThisBuild, + * nor in other project settings like integrations'. No. Sbt is exigent and + * always asks you to give your best. + * + * Why so much code for such a simple idea? Well, `Project.extract` does force + * the execution and initialization of settings, so as `onLoad` is a setting + * it causes a recursive call to itself, yay! 
+ * + * So, in short, solution: use an attribute in the state to short-circuit the + * recursive invocation. + * + * Notes to the future reader: the bug that prompted this solution is weird + * I can indeed override lots of settings via project refs, but when it comes + * to overriding a setting **in a project** (that has been generated via + * sbt-cross-project), it does not work. On top of this, this wouldn't happen + * if monocle defined the scala versions at the build level (it instead does it + * at the project level, which is bad practice). So, finding a repro for this + * is going to be fun. + */ final val hijacked = sbt.AttributeKey[Boolean]("the hijacked sexy option.") //////////////////////////////////////////////////////////////////////////////// @@ -123,7 +129,7 @@ object BuildKeys { private val AllKeywords = List( Keywords.Integration, Keywords.BetterFiles, - Keywords.Wartremover, + Keywords.Wartremover ) import sbt.complete.Parser @@ -206,7 +212,8 @@ object BuildImplementation { def setScalaVersion(ref: String) = s"""$ref / ${Keys.scalaVersion.key.label} := "$scalaV"""" def setScalacOptions(ref: String) = - s"""$ref / ${Keys.scalacOptions.key.label} := ${MethodRefs.scalacProfilingScalacOptionsRef(ref)}.value""".stripMargin + s"""$ref / ${Keys.scalacOptions.key.label} := ${MethodRefs + .scalacProfilingScalacOptionsRef(ref)}.value""".stripMargin def setUnmanagedJars(ref: String, config: String) = s"""$ref / $config / ${Keys.unmanagedJars.key.label} := ${MethodRefs.setUpUnmanagedJarsRef}.value""" val msg = "The build integrations are set up." 
@@ -266,10 +273,10 @@ object BuildImplementation { "-deprecation" :: "-encoding" :: "UTF-8" :: "-feature" :: "-language:existentials" :: "-language:higherKinds" :: "-language:implicitConversions" :: "-unchecked" :: "-Ywarn-numeric-widen" :: "-Xlint" :: Nil - ) + ) if (Keys.scalaVersion.value.startsWith("2.13")) base else base :+ "-Xfuture" - }, + } // Necessary because the scalac version has to be always SNAPSHOT to avoid caching issues // Scope here is wrong -- we put it here temporarily until this is fixed upstream // ReleaseEarlyKeys.releaseEarlyBypassSnapshotCheck := true diff --git a/project/WorkingPluginCross.scala b/project/WorkingPluginCross.scala index 5e0998a..0c1c484 100644 --- a/project/WorkingPluginCross.scala +++ b/project/WorkingPluginCross.scala @@ -13,7 +13,8 @@ object WorkingPluginCross { def switchParser(state: State): Parser[(String, String)] = { val knownVersions = Nil lazy val switchArgs = token(NotSpace.examples(knownVersions: _*)) ~ (token( - Space ~> matched(state.combinedParser)) ?? "") + Space ~> matched(state.combinedParser) + ) ?? 
"") lazy val nextSpaced = spacedFirst(PluginSwitchCommand) token(PluginSwitchCommand ~ OptSpace) flatMap { _ => switchArgs & nextSpaced diff --git a/project/build.sbt b/project/build.sbt index cd488e8..4feb5ad 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -11,5 +11,5 @@ lazy val root = project // val pluginMainDir = baseDirectory.value.getParentFile / "sbt-plugin" / "src" / "main" // List(pluginMainDir / "scala", pluginMainDir / s"scala-sbt-${Keys.sbtBinaryVersion.value}") // }, - libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.13", + libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.13" ) From 73fc198aee14877a9a86357abf6d882496f39efe Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 12:49:21 +0300 Subject: [PATCH 4/9] Format scala files --- .../scala/ch/epfl/scala/ProfilingPlugin.scala | 40 +++++++---- .../epfl/scala/profilers/ProfilingImpl.scala | 70 +++++++++++-------- .../scala/ch/epfl/scala/ImplicitTest.scala | 47 ++++++------- .../scala/ch/epfl/scala/tools/TestUtil.scala | 19 +++-- .../ch.epfl.scala.profiledb/ProfileDb.scala | 2 +- .../ch/epfl/scala/ProfilingSbtPlugin.scala | 26 +++---- .../sbt/ch/epfl/scala/SbtTaskTimer.scala | 4 +- 7 files changed, 113 insertions(+), 95 deletions(-) diff --git a/plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala b/plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala index f89f27d..a7744e6 100644 --- a/plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala +++ b/plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala @@ -58,10 +58,9 @@ class ProfilingPlugin(val global: Global) extends Plugin { self => private final lazy val config = PluginConfig( showProfiles = super.options.contains(ShowProfiles), generateDb = super.options.contains(GenerateProfileDb), - sourceRoot = - findOption(SourceRoot, SourceRootRegex) - .map(AbsolutePath.apply) - .getOrElse(AbsolutePath.workingDirectory), + sourceRoot = findOption(SourceRoot, SourceRootRegex) 
+ .map(AbsolutePath.apply) + .getOrElse(AbsolutePath.workingDirectory), printSearchIds = findSearchIds(findOption(PrintSearchResult, PrintSearchRegex)), generateMacroFlamegraph = super.options.contains(GenerateMacroFlamegraph), generateGlobalFlamegraph = super.options.contains(GenerateGlobalFlamegraph), @@ -75,13 +74,21 @@ class ProfilingPlugin(val global: Global) extends Plugin { self => override def init(ops: List[String], e: (String) => Unit): Boolean = true - override val optionsHelp: Option[String] = Some(s""" - |-P:$name:${pad20(GenerateGlobalFlamegraph)}: Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:$name:$SourceRoot` option to manage the root directory, otherwise, a working directory (defined by the `user.dir` property) will be picked. + override val optionsHelp: Option[String] = Some( + s""" + |-P:$name:${pad20( + GenerateGlobalFlamegraph + )}: Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:$name:$SourceRoot` option to manage the root directory, otherwise, a working directory (defined by the `user.dir` property) will be picked. |-P:$name:${pad20(SourceRoot)}:_ Sets the source root for this project. |-P:$name:${pad20(ShowProfiles)} Logs profile information for every call-site. - |-P:$name:${pad20(ShowConcreteImplicitTparams)} Shows types in flamegraphs of implicits with concrete type params. - |-P:$name:${pad20(PrintSearchResult)}:_ Print implicit search result trees for a list of search ids separated by a comma. - """.stripMargin) + |-P:$name:${pad20( + ShowConcreteImplicitTparams + )} Shows types in flamegraphs of implicits with concrete type params. + |-P:$name:${pad20( + PrintSearchResult + )}:_ Print implicit search result trees for a list of search ids separated by a comma. 
+ """.stripMargin + ) lazy val implementation = new ProfilingImpl(ProfilingPlugin.this.global, config, logger) implementation.registerProfilers() @@ -112,8 +119,10 @@ class ProfilingPlugin(val global: Global) extends Plugin { self => else if (ScalaSettingsOps.isScala213) "scala-2.13" else - sys.error(s"Currently, only Scala 2.12 and 2.13 are supported, " + - s"but [${global.settings.source.value}] has been spotted") + sys.error( + s"Currently, only Scala 2.12 and 2.13 are supported, " + + s"but [${global.settings.source.value}] has been spotted" + ) val globalDir = ProfileDbPath.toGraphsProfilePath( @@ -280,8 +289,10 @@ class ProfilingPlugin(val global: Global) extends Plugin { self => ProfileDb.read(path).flatMap { oldDb => val oldDbType = oldDb.`type` val newDbType = db.`type` - if (oldDbType.isGlobal && newDbType.isGlobal || - (oldDbType.isPerCompilationUnit && newDbType.isPerCompilationUnit)) { + if ( + oldDbType.isGlobal && newDbType.isGlobal || + (oldDbType.isPerCompilationUnit && newDbType.isPerCompilationUnit) + ) { val updatedDb = oldDb.addAllEntries(db.entries) ProfileDb.write(updatedDb, path) } else Try(sys.error(s"Db type mismatch: $newDbType != $oldDbType")) @@ -303,7 +314,8 @@ class ProfilingPlugin(val global: Global) extends Plugin { self => override def apply(unit: global.CompilationUnit): Unit = { if ( SettingsOps.areStatisticsEnabled(global) && - config.generateDb) { + config.generateDb + ) { val currentSourceFile = unit.source val compilationUnitEntry = profileDbEntryFor(currentSourceFile) dbPathFor(currentSourceFile) match { diff --git a/plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala b/plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala index 6c6a03d..121c599 100644 --- a/plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala +++ b/plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala @@ -31,14 +31,14 @@ final class ProfilingImpl[G <: Global]( } /** - * Represents the profiling information 
about expanded macros. - * - * Note that we could derive the value of expanded macros from the - * number of instances of [[MacroInfo]] if it were not by the fact - * that a macro can expand in the same position more than once. We - * want to be able to report/analyse such cases on their own, so - * we keep it as a paramater of this entity. - */ + * Represents the profiling information about expanded macros. + * + * Note that we could derive the value of expanded macros from the + * number of instances of [[MacroInfo]] if it were not by the fact + * that a macro can expand in the same position more than once. We + * want to be able to report/analyse such cases on their own, so + * we keep it as a parameter of this entity. + */ case class MacroInfo(expandedMacros: Int, expandedNodes: Int, expansionNanos: Long) { def +(other: MacroInfo): MacroInfo = { val totalExpanded = expandedMacros + other.expandedMacros @@ -76,7 +76,7 @@ final class ProfilingImpl[G <: Global]( } lazy val macroProfiler: MacroProfiler = { - import ProfilingMacroPlugin.macroInfos //, repeatedTrees} + import ProfilingMacroPlugin.macroInfos // , repeatedTrees} val perCallSite = macroInfos.toMap val perFile = groupPerFile(perCallSite)(MacroInfo.Empty, _ + _) .map { @@ -91,9 +91,9 @@ final class ProfilingImpl[G <: Global]( // perFile and inTotal are already converted to millis val callSiteNanos = perCallSite.map { - case (pos, mi) => pos -> mi.copy(expansionNanos = toMillis(mi.expansionNanos)) - } - MacroProfiler(callSiteNanos, perFile, inTotal, Map.empty) //repeated) + case (pos, mi) => pos -> mi.copy(expansionNanos = toMillis(mi.expansionNanos)) + } + MacroProfiler(callSiteNanos, perFile, inTotal, Map.empty) // repeated) } case class ImplicitInfo(count: Int) { @@ -149,7 +149,10 @@ final class ProfilingImpl[G <: Global]( } } - def generateGraphData(outputDir: AbsolutePath, globalDirMaybe: Option[AbsolutePath]): List[AbsolutePath] = { + def generateGraphData( + outputDir: AbsolutePath, + 
globalDirMaybe: Option[AbsolutePath] + ): List[AbsolutePath] = { Files.createDirectories(outputDir.underlying) val randomId = java.lang.Long.toString(System.currentTimeMillis()) @@ -222,9 +225,12 @@ final class ProfilingImpl[G <: Global]( stackedNanos.foreach { case (id, (nanos, _)) => val names = - stackedNames.getOrElse(id, sys.error(s"Stack name for search id ${id} doesn't exist!")) + stackedNames.getOrElse( + id, + sys.error(s"Stack name for search id ${id} doesn't exist!") + ) val stackName = names.mkString(";") - //val count = implicitSearchesByType.getOrElse(tpe, sys.error(s"No counter for ${tpe}")) + // val count = implicitSearchesByType.getOrElse(tpe, sys.error(s"No counter for ${tpe}")) stacksJavaList.add(s"$stackName ${nanos / 1000}") } java.util.Collections.sort(stacksJavaList) @@ -254,12 +260,12 @@ final class ProfilingImpl[G <: Global]( `type`, sys.error { s"""Id for ${`type`} doesn't exist. - | - | Information about the type: - | - `structure` -> ${global.showRaw(`type`)} - | - `safeToString` -> ${`type`.safeToString} - | - `toLongString` after typer -> ${typeToString(`type`)} - | - `typeSymbol` -> ${`type`.typeSymbol} + | + | Information about the type: + | - `structure` -> ${global.showRaw(`type`)} + | - `safeToString` -> ${`type`.safeToString} + | - `toLongString` after typer -> ${typeToString(`type`)} + | - `typeSymbol` -> ${`type`.typeSymbol} """.stripMargin } ) @@ -281,10 +287,10 @@ final class ProfilingImpl[G <: Global]( } val graph = s"""digraph "$graphName" { - | graph [ranksep=0, rankdir=LR]; - |${nodeInfos.mkString(" ", "\n ", "\n ")} - |${connections.mkString(" ", "\n ", "\n ")} - |}""".stripMargin.getBytes + | graph [ranksep=0, rankdir=LR]; + |${nodeInfos.mkString(" ", "\n ", "\n ")} + |${connections.mkString(" ", "\n ", "\n ")} + |}""".stripMargin.getBytes Files.write(outputPath, graph, StandardOpenOption.WRITE, StandardOpenOption.CREATE) } @@ -400,7 +406,11 @@ final class ProfilingImpl[G <: Global]( else 
concreteTypeFromSearch(result.subst(result.tree), targetType) } - if (config.printSearchIds.contains(searchId) || (result.isFailure && config.printFailedMacroImplicits)) { + if ( + config.printSearchIds.contains( + searchId + ) || (result.isFailure && config.printFailedMacroImplicits) + ) { logger.info( s"""implicit search ${searchId}: | -> valid ${result.isSuccess} @@ -477,9 +487,9 @@ final class ProfilingImpl[G <: Global]( 1 + tree.children.map(guessTreeSize).sum type RepeatedKey = (String, String) - //case class RepeatedValue(original: Tree, result: Tree, count: Int) - //private final val EmptyRepeatedValue = RepeatedValue(EmptyTree, EmptyTree, 0) - //private[ProfilingImpl] val repeatedTrees = perRunCaches.newMap[RepeatedKey, RepeatedValue] + // case class RepeatedValue(original: Tree, result: Tree, count: Int) + // private final val EmptyRepeatedValue = RepeatedValue(EmptyTree, EmptyTree, 0) + // private[ProfilingImpl] val repeatedTrees = perRunCaches.newMap[RepeatedKey, RepeatedValue] val macroInfos = perRunCaches.newAnyRefMap[Position, MacroInfo]() val searchIdsToMacroStates = perRunCaches.newMap[Int, List[MacroState]]() @@ -680,7 +690,7 @@ final class ProfilingImpl[G <: Global]( repeatedTrees.put(key, newValue)*/ val macroInfo = macroInfos.getOrElse(callSitePos, MacroInfo.Empty) val expandedMacros = macroInfo.expandedMacros + 1 - val treeSize = 0 //macroInfo.expandedNodes + guessTreeSize(expanded) + val treeSize = 0 // macroInfo.expandedNodes + guessTreeSize(expanded) // Use 0L for the timer because it will be filled in by the caller `apply` macroInfos.put(callSitePos, MacroInfo(expandedMacros, treeSize, 0L)) diff --git a/plugin/src/test/scala/ch/epfl/scala/ImplicitTest.scala b/plugin/src/test/scala/ch/epfl/scala/ImplicitTest.scala index dbdb686..5d2b9f2 100644 --- a/plugin/src/test/scala/ch/epfl/scala/ImplicitTest.scala +++ b/plugin/src/test/scala/ch/epfl/scala/ImplicitTest.scala @@ -1,17 +1,17 @@ package shapeless { sealed trait HList extends Product with 
Serializable - final case class ::[+H, +T <: HList](head : H, tail : T) extends HList { - def ::[HH](h : HH) : HH :: H :: T = shapeless.::(h, this) + final case class ::[+H, +T <: HList](head: H, tail: T) extends HList { + def ::[HH](h: HH): HH :: H :: T = shapeless.::(h, this) override def toString = head match { - case _: ::[_, _] => "("+head.toString+") :: "+tail.toString - case _ => head.toString+" :: "+tail.toString + case _: ::[_, _] => "(" + head.toString + ") :: " + tail.toString + case _ => head.toString + " :: " + tail.toString } } sealed trait HNil extends HList { - def ::[H](h : H) = shapeless.::(h, this) + def ::[H](h: H) = shapeless.::(h, this) override def toString = "HNil" } @@ -26,14 +26,13 @@ package shapeless { implicit def inHead[H, T <: HList]: Selector[H :: T, H] = new Selector[H :: T, H] { - def apply(l : H :: T) = l.head + def apply(l: H :: T) = l.head } - implicit def inTail[H, T <: HList, U] - (implicit st : Selector[T, U]): Selector[H :: T, U] = - new Selector[H :: T, U] { - def apply(l : H :: T) = st(l.tail) - } + implicit def inTail[H, T <: HList, U](implicit st: Selector[T, U]): Selector[H :: T, U] = + new Selector[H :: T, U] { + def apply(l: H :: T) = st(l.tail) + } } } @@ -583,16 +582,16 @@ object Test extends App { // // type L = Int :: - Int :: - Int :: - Int :: - Int :: - Int :: - Int :: - Int :: - Int :: - Int :: -// - Boolean :: - HNil -} \ No newline at end of file + Int :: + Int :: + Int :: + Int :: + Int :: + Int :: + Int :: + Int :: + Int :: +// + Boolean :: + HNil +} diff --git a/plugin/src/test/scala/ch/epfl/scala/tools/TestUtil.scala b/plugin/src/test/scala/ch/epfl/scala/tools/TestUtil.scala index a7773e1..ba970c6 100644 --- a/plugin/src/test/scala/ch/epfl/scala/tools/TestUtil.scala +++ b/plugin/src/test/scala/ch/epfl/scala/tools/TestUtil.scala @@ -30,8 +30,7 @@ object TestUtil { tb.eval(tb.parse(code)) } - def mkToolbox(compileOptions: String = "") - : ToolBox[_ <: scala.reflect.api.Universe] = { + def 
mkToolbox(compileOptions: String = ""): ToolBox[_ <: scala.reflect.api.Universe] = { val m = scala.reflect.runtime.currentMirror import scala.tools.reflect.ToolBox m.mkToolBox(options = compileOptions) @@ -49,15 +48,15 @@ object TestUtil { def expectError( errorSnippet: String, compileOptions: String = "", - baseCompileOptions: String = - s"-cp $toolboxClasspath $toolboxPluginOptions")(code: String): Unit = { + baseCompileOptions: String = s"-cp $toolboxClasspath $toolboxPluginOptions" + )(code: String): Unit = { val errorMessage = intercept[ToolBoxError] { eval(code, s"$compileOptions $baseCompileOptions") }.getMessage val userMessage = s""" - |FOUND: $errorMessage - |EXPECTED: $errorSnippet + |FOUND: $errorMessage + |EXPECTED: $errorSnippet """.stripMargin assert(errorMessage.contains(errorSnippet), userMessage) } @@ -65,10 +64,8 @@ object TestUtil { def expectWarning( errorSnippet: String, compileOptions: String = "", - baseCompileOptions: String = - s"-cp $toolboxClasspath $toolboxPluginOptions")(code: String): Unit = { - expectError(errorSnippet, - compileOptions + "-Xfatal-warnings", - baseCompileOptions)(code) + baseCompileOptions: String = s"-cp $toolboxClasspath $toolboxPluginOptions" + )(code: String): Unit = { + expectError(errorSnippet, compileOptions + "-Xfatal-warnings", baseCompileOptions)(code) } } diff --git a/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDb.scala b/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDb.scala index 8a30cb3..b5af8f1 100644 --- a/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDb.scala +++ b/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDb.scala @@ -36,4 +36,4 @@ object ProfileDb { outputStream.close() database } -} \ No newline at end of file +} diff --git a/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/ProfilingSbtPlugin.scala b/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/ProfilingSbtPlugin.scala index f8ca36e..95c539d 100644 --- 
a/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/ProfilingSbtPlugin.scala +++ b/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/ProfilingSbtPlugin.scala @@ -104,19 +104,19 @@ object ProfilingPluginImplementation { import sbt.{Scope, IO, Path} /** - * This command defines the warming up behaviour. - * - * After incessant attempts to get it working within tasks by only limiting ourselves - * to the task API, this task has proven itself impossible because sbt does not allow - * recursiveness at the task level. Any tried workaround (using task proxies et al) has - * miserably failed. - * - * As a result, we have no other choice than delegating to the Command API and using - * the state directly, implementing a traditional while loop that takes care of warming - * the compiler up. - * - * This command is private and SHOULD NOT be invoked directly. Use `profilingWarmupCompiler`. - */ + * This command defines the warming up behaviour. + * + * After incessant attempts to get it working within tasks by only limiting ourselves + * to the task API, this task has proven itself impossible because sbt does not allow + * recursiveness at the task level. Any tried workaround (using task proxies et al) has + * miserably failed. + * + * As a result, we have no other choice than delegating to the Command API and using + * the state directly, implementing a traditional while loop that takes care of warming + * the compiler up. + * + * This command is private and SHOULD NOT be invoked directly. Use `profilingWarmupCompiler`. 
+ */ val profilingWarmupCommand: Command = Command.command("warmupCompileFor") { (st0: State) => def getStateAttribute[T](key: sbt.AttributeKey[T]): T = st0.get(key).getOrElse(sys.error(s"The caller did not pass the attribute ${key.label}")) diff --git a/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/SbtTaskTimer.scala b/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/SbtTaskTimer.scala index 8047831..14ef15b 100644 --- a/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/SbtTaskTimer.scala +++ b/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/SbtTaskTimer.scala @@ -66,9 +66,9 @@ class SbtTaskTimer(timers: ConcurrentHashMap[ScopedKey[_], BoxedLong], isDebugEn def allCompleted(state: Unit, results: sbt.RMap[Task, sbt.Result]): Unit = () def completed[T](state: Unit, task: Task[T], result: sbt.Result[T]): Unit = () def ready(state: Unit, task: Task[_]): Unit = () - def afterAllCompleted(results: sbt.internal.util.RMap[sbt.Task,sbt.Result]): Unit = () + def afterAllCompleted(results: sbt.internal.util.RMap[sbt.Task, sbt.Result]): Unit = () def afterReady(task: sbt.Task[_]): Unit = () - def afterWork[A](task: sbt.Task[A],result: Either[sbt.Task[A],sbt.Result[A]]): Unit = () + def afterWork[A](task: sbt.Task[A], result: Either[sbt.Task[A], sbt.Result[A]]): Unit = () def beforeWork(task: sbt.Task[_]): Unit = () def stop(): Unit = () } From b53926a7e4b8eeb4b32f2fc134641809a5700594 Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 12:58:48 +0300 Subject: [PATCH 5/9] Set up checking formatting in CI --- .github/workflows/ci.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d5a53fd..23dace5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,15 @@ on: branches: ['**', '!update/**', '!pr/**'] jobs: + scalafmt: + name: Formatting + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: olafurpg/setup-scala@v13 + - uses: coursier/cache-action@v5 + - name: Check 
formatting + run: sbt -v "scalafmtSbtCheck" "scalafmtCheckAll" test: name: ${{ matrix.command }} runs-on: ubuntu-20.04 From 52007f8d90cfd23565b5476dfe6685d5578c0e1a Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 13:02:04 +0300 Subject: [PATCH 6/9] Update the ci.yml external dependencies --- .github/workflows/ci.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 23dace5..ac0f57d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,13 +12,13 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: olafurpg/setup-scala@v13 - - uses: coursier/cache-action@v5 + - uses: olafurpg/setup-scala@v14 + - uses: coursier/cache-action@v6 - name: Check formatting run: sbt -v "scalafmtSbtCheck" "scalafmtCheckAll" test: name: ${{ matrix.command }} - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest strategy: fail-fast: false matrix: @@ -26,9 +26,9 @@ jobs: - sbt "+plugin/test" "profilingSbtPlugin/scripted compiler-profiling/scripted" - sbt "showScalaInstances" "integrations/testOnly integration better-files wartremover" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true - - uses: olafurpg/setup-scala@v13 - - uses: coursier/cache-action@v5 + - uses: olafurpg/setup-scala@v14 + - uses: coursier/cache-action@v6 - run: ${{ matrix.command }} From 700806174d577c4d6bf38fb751da099da2be5118 Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 13:06:00 +0300 Subject: [PATCH 7/9] Update the release.yml external dependencies --- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c898b57..ab90797 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,16 +5,16 @@ on: tags: ["*"] jobs: publish: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - - 
uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-java@v3 with: - distribution: temurin - java-version: 8 - cache: sbt + distribution: 'temurin' + java-version: '8' + cache: 'sbt' - run: sbt ci-release env: PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} From 25643000332c5a5204392c48ab4143092a13a98c Mon Sep 17 00:00:00 2001 From: danicheg Date: Sat, 25 Nov 2023 13:21:10 +0300 Subject: [PATCH 8/9] Add 'trailingCommas = preserve' setting to Scalafmt --- .scalafmt.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/.scalafmt.conf b/.scalafmt.conf index 1cffef8..472cd55 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -10,6 +10,7 @@ align.openParenCallSite = false align.openParenDefnSite = false binPack.literalArgumentLists = true project.git = true +trailingCommas = preserve # exclude submodules project.excludeFilters = [ From 1b3235070c6d97d61ed4f26952227a083270524b Mon Sep 17 00:00:00 2001 From: danicheg Date: Sun, 26 Nov 2023 11:39:10 +0300 Subject: [PATCH 9/9] Rm olafurpg/setup-scala action --- .github/workflows/ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac0f57d..03a46a7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: olafurpg/setup-scala@v14 - uses: coursier/cache-action@v6 - name: Check formatting run: sbt -v "scalafmtSbtCheck" "scalafmtCheckAll" @@ -29,6 +28,5 @@ jobs: - uses: actions/checkout@v4 with: submodules: true - - uses: olafurpg/setup-scala@v14 - uses: coursier/cache-action@v6 - run: ${{ matrix.command }}