Skip to content

Commit

Permalink
Merge pull request #56 from danicheg/scalafmt
Browse files Browse the repository at this point in the history
Set up Scalafmt correctly
  • Loading branch information
danicheg authored Nov 26, 2023
2 parents af02e05 + 1b32350 commit 22ffa42
Show file tree
Hide file tree
Showing 14 changed files with 199 additions and 148 deletions.
15 changes: 11 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,26 @@ on:
branches: ['**', '!update/**', '!pr/**']

jobs:
scalafmt:
name: Formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: coursier/cache-action@v6
- name: Check formatting
run: sbt -v "scalafmtSbtCheck" "scalafmtCheckAll"
test:
name: ${{ matrix.command }}
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- sbt "+plugin/test" "profilingSbtPlugin/scripted compiler-profiling/scripted"
- sbt "showScalaInstances" "integrations/testOnly integration better-files wartremover"
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
submodules: true
- uses: olafurpg/setup-scala@v13
- uses: coursier/cache-action@v5
- uses: coursier/cache-action@v6
- run: ${{ matrix.command }}
10 changes: 5 additions & 5 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,16 @@ on:
tags: ["*"]
jobs:
publish:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-java@v3
with:
distribution: temurin
java-version: 8
cache: sbt
distribution: 'temurin'
java-version: '8'
cache: 'sbt'
- run: sbt ci-release
env:
PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
Expand Down
22 changes: 17 additions & 5 deletions .scalafmt.conf
Original file line number Diff line number Diff line change
@@ -1,6 +1,18 @@
version = "3.7.17"
runner.dialect = Scala213
maxColumn = 100
project.git=true
align = none
danglingParentheses = true
newlines.neverBeforeJsNative = true
newlines.sometimesBeforeColonInMethodReturnType = false
docstrings.style = Asterisk
docstrings.wrap = no
assumeStandardLibraryStripMargin = true
newlines.beforeCurlyLambdaParams = multilineWithCaseOnly
align.tokens=[]
align.openParenCallSite = false
align.openParenDefnSite = false
binPack.literalArgumentLists = true
project.git = true
trailingCommas = preserve

# exclude submodules
project.excludeFilters = [
"vscode-scala/"
]
23 changes: 14 additions & 9 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ lazy val fullCrossVersionSettings = Seq(
// Unfortunately, it only includes directories like "scala_2.12" or "scala_2.13",
// not "scala_2.12.18" or "scala_2.13.12" that we need.
// That's why we have to work around here.
val base = (Compile/ sourceDirectory).value
val base = (Compile / sourceDirectory).value
val versionDir = scalaVersion.value.replaceAll("-.*", "")
base / ("scala-" + versionDir)
}
Expand All @@ -48,7 +48,7 @@ import _root_.ch.epfl.scala.profiling.build.BuildImplementation.BuildDefaults
import scalapb.compiler.Version.scalapbVersion
lazy val profiledb = project
.in(file("profiledb"))
//.settings(metalsSettings)
// .settings(metalsSettings)
.settings(
// Specify scala version to allow third-party software to use this module
crossScalaVersions := bin212 ++ bin213,
Expand All @@ -61,7 +61,7 @@ lazy val profiledb = project
// Do not change the lhs id of this plugin, `BuildPlugin` relies on it
lazy val plugin = project
.dependsOn(profiledb)
//.settings(metalsSettings)
// .settings(metalsSettings)
.settings(
fullCrossVersionSettings,
name := "scalac-profiling",
Expand Down Expand Up @@ -90,8 +90,8 @@ lazy val plugin = project
// Enable debugging information when necessary
val debuggingPluginOptions =
if (!enableStatistics.value) Nil
else List("-Ystatistics") //, "-P:scalac-profiling:show-profiles")
//else List("-Xlog-implicits", "-Ystatistics:typer")
else List("-Ystatistics") // , "-P:scalac-profiling:show-profiles")
// else List("-Xlog-implicits", "-Ystatistics:typer")
Seq(addPlugin, dummy) ++ debuggingPluginOptions
},
Test / scalacOptions ++= optionsForSourceCompilerPlugin.value,
Expand Down Expand Up @@ -149,13 +149,18 @@ lazy val plugin = project
// .dependsOn(publishLocal)
// .value
// )
//
//
lazy val profilingSbtPlugin = project
.in(file("sbt-plugin"))
.settings(
name := "sbt-scalac-profiling",
scalaVersion := bin212.head,
scriptedLaunchOpts ++= Seq("-Xmx2048M", "-Xms1024M", "-Xss8M", s"-Dplugin.version=${version.value}"),
scriptedLaunchOpts ++= Seq(
"-Xmx2048M",
"-Xms1024M",
"-Xss8M",
s"-Dplugin.version=${version.value}"
),
scriptedBufferLog := false
)
.enablePlugins(SbtPlugin)
Expand All @@ -176,13 +181,13 @@ lazy val integrations = project
.sequential(
clean,
(BetterFilesCore / Compile / clean),
(WartremoverCore / Compile / clean),
(WartremoverCore / Compile / clean)
)
.value,
test := Def
.sequential(
(ThisBuild / showScalaInstances),
(Compile / compile),
(Compile / compile)
)
.value,
testOnly := Def.inputTaskDyn {
Expand Down
40 changes: 26 additions & 14 deletions plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,9 @@ class ProfilingPlugin(val global: Global) extends Plugin { self =>
private final lazy val config = PluginConfig(
showProfiles = super.options.contains(ShowProfiles),
generateDb = super.options.contains(GenerateProfileDb),
sourceRoot =
findOption(SourceRoot, SourceRootRegex)
.map(AbsolutePath.apply)
.getOrElse(AbsolutePath.workingDirectory),
sourceRoot = findOption(SourceRoot, SourceRootRegex)
.map(AbsolutePath.apply)
.getOrElse(AbsolutePath.workingDirectory),
printSearchIds = findSearchIds(findOption(PrintSearchResult, PrintSearchRegex)),
generateMacroFlamegraph = super.options.contains(GenerateMacroFlamegraph),
generateGlobalFlamegraph = super.options.contains(GenerateGlobalFlamegraph),
Expand All @@ -75,13 +74,21 @@ class ProfilingPlugin(val global: Global) extends Plugin { self =>

override def init(ops: List[String], e: (String) => Unit): Boolean = true

override val optionsHelp: Option[String] = Some(s"""
|-P:$name:${pad20(GenerateGlobalFlamegraph)}: Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:$name:$SourceRoot` option to manage the root directory, otherwise, a working directory (defined by the `user.dir` property) will be picked.
override val optionsHelp: Option[String] = Some(
s"""
|-P:$name:${pad20(
GenerateGlobalFlamegraph
)}: Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:$name:$SourceRoot` option to manage the root directory, otherwise, a working directory (defined by the `user.dir` property) will be picked.
|-P:$name:${pad20(SourceRoot)}:_ Sets the source root for this project.
|-P:$name:${pad20(ShowProfiles)} Logs profile information for every call-site.
|-P:$name:${pad20(ShowConcreteImplicitTparams)} Shows types in flamegraphs of implicits with concrete type params.
|-P:$name:${pad20(PrintSearchResult)}:_ Print implicit search result trees for a list of search ids separated by a comma.
""".stripMargin)
|-P:$name:${pad20(
ShowConcreteImplicitTparams
)} Shows types in flamegraphs of implicits with concrete type params.
|-P:$name:${pad20(
PrintSearchResult
)}:_ Print implicit search result trees for a list of search ids separated by a comma.
""".stripMargin
)

lazy val implementation = new ProfilingImpl(ProfilingPlugin.this.global, config, logger)
implementation.registerProfilers()
Expand Down Expand Up @@ -112,8 +119,10 @@ class ProfilingPlugin(val global: Global) extends Plugin { self =>
else if (ScalaSettingsOps.isScala213)
"scala-2.13"
else
sys.error(s"Currently, only Scala 2.12 and 2.13 are supported, " +
s"but [${global.settings.source.value}] has been spotted")
sys.error(
s"Currently, only Scala 2.12 and 2.13 are supported, " +
s"but [${global.settings.source.value}] has been spotted"
)

val globalDir =
ProfileDbPath.toGraphsProfilePath(
Expand Down Expand Up @@ -280,8 +289,10 @@ class ProfilingPlugin(val global: Global) extends Plugin { self =>
ProfileDb.read(path).flatMap { oldDb =>
val oldDbType = oldDb.`type`
val newDbType = db.`type`
if (oldDbType.isGlobal && newDbType.isGlobal ||
(oldDbType.isPerCompilationUnit && newDbType.isPerCompilationUnit)) {
if (
oldDbType.isGlobal && newDbType.isGlobal ||
(oldDbType.isPerCompilationUnit && newDbType.isPerCompilationUnit)
) {
val updatedDb = oldDb.addAllEntries(db.entries)
ProfileDb.write(updatedDb, path)
} else Try(sys.error(s"Db type mismatch: $newDbType != $oldDbType"))
Expand All @@ -303,7 +314,8 @@ class ProfilingPlugin(val global: Global) extends Plugin { self =>
override def apply(unit: global.CompilationUnit): Unit = {
if (
SettingsOps.areStatisticsEnabled(global) &&
config.generateDb) {
config.generateDb
) {
val currentSourceFile = unit.source
val compilationUnitEntry = profileDbEntryFor(currentSourceFile)
dbPathFor(currentSourceFile) match {
Expand Down
70 changes: 40 additions & 30 deletions plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,14 @@ final class ProfilingImpl[G <: Global](
}

/**
* Represents the profiling information about expanded macros.
*
* Note that we could derive the value of expanded macros from the
* number of instances of [[MacroInfo]] if it were not by the fact
* that a macro can expand in the same position more than once. We
* want to be able to report/analyse such cases on their own, so
* we keep it as a parameter of this entity.
*/
* Represents the profiling information about expanded macros.
*
* Note that we could derive the value of expanded macros from the
* number of instances of [[MacroInfo]] if it were not by the fact
* that a macro can expand in the same position more than once. We
* want to be able to report/analyse such cases on their own, so
* we keep it as a parameter of this entity.
*/
case class MacroInfo(expandedMacros: Int, expandedNodes: Int, expansionNanos: Long) {
def +(other: MacroInfo): MacroInfo = {
val totalExpanded = expandedMacros + other.expandedMacros
Expand Down Expand Up @@ -76,7 +76,7 @@ final class ProfilingImpl[G <: Global](
}

lazy val macroProfiler: MacroProfiler = {
import ProfilingMacroPlugin.macroInfos //, repeatedTrees}
import ProfilingMacroPlugin.macroInfos // , repeatedTrees}
val perCallSite = macroInfos.toMap
val perFile = groupPerFile(perCallSite)(MacroInfo.Empty, _ + _)
.map {
Expand All @@ -91,9 +91,9 @@ final class ProfilingImpl[G <: Global](

// perFile and inTotal are already converted to millis
val callSiteNanos = perCallSite.map {
case (pos, mi) => pos -> mi.copy(expansionNanos = toMillis(mi.expansionNanos))
}
MacroProfiler(callSiteNanos, perFile, inTotal, Map.empty) //repeated)
case (pos, mi) => pos -> mi.copy(expansionNanos = toMillis(mi.expansionNanos))
}
MacroProfiler(callSiteNanos, perFile, inTotal, Map.empty) // repeated)
}

case class ImplicitInfo(count: Int) {
Expand Down Expand Up @@ -149,7 +149,10 @@ final class ProfilingImpl[G <: Global](
}
}

def generateGraphData(outputDir: AbsolutePath, globalDirMaybe: Option[AbsolutePath]): List[AbsolutePath] = {
def generateGraphData(
outputDir: AbsolutePath,
globalDirMaybe: Option[AbsolutePath]
): List[AbsolutePath] = {
Files.createDirectories(outputDir.underlying)

val randomId = java.lang.Long.toString(System.currentTimeMillis())
Expand Down Expand Up @@ -222,9 +225,12 @@ final class ProfilingImpl[G <: Global](
stackedNanos.foreach {
case (id, (nanos, _)) =>
val names =
stackedNames.getOrElse(id, sys.error(s"Stack name for search id ${id} doesn't exist!"))
stackedNames.getOrElse(
id,
sys.error(s"Stack name for search id ${id} doesn't exist!")
)
val stackName = names.mkString(";")
//val count = implicitSearchesByType.getOrElse(tpe, sys.error(s"No counter for ${tpe}"))
// val count = implicitSearchesByType.getOrElse(tpe, sys.error(s"No counter for ${tpe}"))
stacksJavaList.add(s"$stackName ${nanos / 1000}")
}
java.util.Collections.sort(stacksJavaList)
Expand Down Expand Up @@ -254,12 +260,12 @@ final class ProfilingImpl[G <: Global](
`type`,
sys.error {
s"""Id for ${`type`} doesn't exist.
|
| Information about the type:
| - `structure` -> ${global.showRaw(`type`)}
| - `safeToString` -> ${`type`.safeToString}
| - `toLongString` after typer -> ${typeToString(`type`)}
| - `typeSymbol` -> ${`type`.typeSymbol}
|
| Information about the type:
| - `structure` -> ${global.showRaw(`type`)}
| - `safeToString` -> ${`type`.safeToString}
| - `toLongString` after typer -> ${typeToString(`type`)}
| - `typeSymbol` -> ${`type`.typeSymbol}
""".stripMargin
}
)
Expand All @@ -281,10 +287,10 @@ final class ProfilingImpl[G <: Global](
}

val graph = s"""digraph "$graphName" {
| graph [ranksep=0, rankdir=LR];
|${nodeInfos.mkString(" ", "\n ", "\n ")}
|${connections.mkString(" ", "\n ", "\n ")}
|}""".stripMargin.getBytes
| graph [ranksep=0, rankdir=LR];
|${nodeInfos.mkString(" ", "\n ", "\n ")}
|${connections.mkString(" ", "\n ", "\n ")}
|}""".stripMargin.getBytes
Files.write(outputPath, graph, StandardOpenOption.WRITE, StandardOpenOption.CREATE)
}

Expand Down Expand Up @@ -400,7 +406,11 @@ final class ProfilingImpl[G <: Global](
else concreteTypeFromSearch(result.subst(result.tree), targetType)
}

if (config.printSearchIds.contains(searchId) || (result.isFailure && config.printFailedMacroImplicits)) {
if (
config.printSearchIds.contains(
searchId
) || (result.isFailure && config.printFailedMacroImplicits)
) {
logger.info(
s"""implicit search ${searchId}:
| -> valid ${result.isSuccess}
Expand Down Expand Up @@ -477,9 +487,9 @@ final class ProfilingImpl[G <: Global](
1 + tree.children.map(guessTreeSize).sum

type RepeatedKey = (String, String)
//case class RepeatedValue(original: Tree, result: Tree, count: Int)
//private final val EmptyRepeatedValue = RepeatedValue(EmptyTree, EmptyTree, 0)
//private[ProfilingImpl] val repeatedTrees = perRunCaches.newMap[RepeatedKey, RepeatedValue]
// case class RepeatedValue(original: Tree, result: Tree, count: Int)
// private final val EmptyRepeatedValue = RepeatedValue(EmptyTree, EmptyTree, 0)
// private[ProfilingImpl] val repeatedTrees = perRunCaches.newMap[RepeatedKey, RepeatedValue]

val macroInfos = perRunCaches.newAnyRefMap[Position, MacroInfo]()
val searchIdsToMacroStates = perRunCaches.newMap[Int, List[MacroState]]()
Expand Down Expand Up @@ -680,7 +690,7 @@ final class ProfilingImpl[G <: Global](
repeatedTrees.put(key, newValue)*/
val macroInfo = macroInfos.getOrElse(callSitePos, MacroInfo.Empty)
val expandedMacros = macroInfo.expandedMacros + 1
val treeSize = 0 //macroInfo.expandedNodes + guessTreeSize(expanded)
val treeSize = 0 // macroInfo.expandedNodes + guessTreeSize(expanded)

// Use 0L for the timer because it will be filled in by the caller `apply`
macroInfos.put(callSitePos, MacroInfo(expandedMacros, treeSize, 0L))
Expand Down
Loading

0 comments on commit 22ffa42

Please sign in to comment.