diff --git a/build.sbt b/build.sbt
index e3b6780..49da24f 100644
--- a/build.sbt
+++ b/build.sbt
@@ -2,13 +2,13 @@ import sbt.Keys.thisProjectRef
 
 ThisBuild / organization := "io.waylay.kairosdb"
 
-val playWsVersion = "2.1.11"
+val playWsVersion = "3.0.3"
 val playJsonVersion = "3.0.3"
 val specs2Version = "4.20.6"
 val dockerTestkitVersion = "0.12.0"
 val scalaTestVersion = "3.2.18"
-val playVersion = "2.8.21" // test only
+val playVersion = "3.0.3" // test only
 
 val scala2_12 = "2.12.19"
 val scala2_13 = "2.13.14"
@@ -40,17 +40,17 @@ lazy val root = (project in file("."))
     "org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0",
     "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.17.1",
     "org.playframework" %% "play-json" % playJsonVersion,
-    "com.typesafe.play" %% "play-ws-standalone" % playWsVersion,
-    "com.typesafe.play" %% "play-ws-standalone-json" % playWsVersion,
+    "org.playframework" %% "play-ws-standalone" % playWsVersion,
+    "org.playframework" %% "play-ws-standalone-json" % playWsVersion,
     "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5",
     "io.lemonlabs" %% "scala-uri" % "4.0.3",
     // TEST
     "org.specs2" %% "specs2-core" % specs2Version % Test,
     "org.specs2" %% "specs2-junit" % specs2Version % Test,
-    "de.leanovate.play-mockws" %% "play-mockws" % "2.8.1" % Test,
-    "com.typesafe.play" %% "play-ahc-ws" % playVersion % TestAndIntegrationTest, // neede for play-mockws
-    "com.typesafe.play" %% "play-test" % playVersion % TestAndIntegrationTest, // play-mockws depends on some types in this dependency
-    "com.typesafe.play" %% "play-ahc-ws-standalone" % playWsVersion % TestAndIntegrationTest,
+    "de.leanovate.play-mockws" %% "play-mockws-3-0" % "3.0.4" % Test,
+    "org.playframework" %% "play-ahc-ws" % playVersion % TestAndIntegrationTest, // needed for play-mockws
+    "org.playframework" %% "play-test" % playVersion % TestAndIntegrationTest, // play-mockws depends on some types in this dependency
+    "org.playframework" %% "play-ahc-ws-standalone" % playWsVersion % TestAndIntegrationTest,
     // INTEGRATION TESTS
     // TODO investigate if we can do this with specs2
     "org.scalatest" %% "scalatest-wordspec" % scalaTestVersion % TestAndIntegrationTest,
@@ -85,7 +85,7 @@ val publishScalaDoc = (ref: ProjectRef) =>
 
 val runIntegrationTest = (ref: ProjectRef) =>
   ReleaseStep(
-    action = releaseStepTaskAggregated(test in IntegrationTest in ref)
+    action = releaseStepTaskAggregated(ref / IntegrationTest / test)
   )
 
 releaseProcess := {
@@ -114,6 +114,6 @@ lazy val examples = project
   .dependsOn(root)
   .settings(
     libraryDependencies ++= Seq(
-      "com.typesafe.play" %% "play-ahc-ws-standalone" % playWsVersion
+      "org.playframework" %% "play-ahc-ws-standalone" % playWsVersion
     )
   )
diff --git a/src/it/scala/integration/IntegrationSpec.scala b/src/it/scala/integration/IntegrationSpec.scala
index a9eae10..efad194 100644
--- a/src/it/scala/integration/IntegrationSpec.scala
+++ b/src/it/scala/integration/IntegrationSpec.scala
@@ -1,10 +1,11 @@
 package integration
 
-import akka.stream.testkit.NoMaterializer
 import com.spotify.docker.client.messages.HostConfig
 import com.typesafe.scalalogging.StrictLogging
 import com.whisk.docker.testkit.{ContainerGroup, ContainerSpec, DockerReadyChecker}
 import com.whisk.docker.testkit.scalatest.DockerTestKitForAll
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.testkit.NoMaterializer
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.matchers.must.Matchers
@@ -37,13 +38,13 @@ trait IntegrationSpec extends AnyWordSpec with Matchers with ScalaFutures with S
       .getOrElse(throw new IllegalStateException(s"Missing container mapped port for $DefaultKairosDbPort"))
   }
 
-  implicit val pc = PatienceConfig(Span(2000, Seconds), Span(1, Second))
+  implicit val pc: PatienceConfig = PatienceConfig(Span(2000, Seconds), Span(1, Second))
   //override def dockerInitPatienceInterval = PatienceConfig(scaled(Span(30, Seconds)), scaled(Span(10, Millis)))
 
   override val managedContainers: ContainerGroup = ContainerGroup(Seq(kairosdbContainer.toContainer))
 
-  implicit val materializer = NoMaterializer
-  val wsClient = StandaloneAhcWSClient()
+  implicit val materializer: Materializer = NoMaterializer
+  val wsClient: StandaloneAhcWSClient = StandaloneAhcWSClient()
 
   override def afterAll(): Unit = {
     wsClient.close()
diff --git a/src/main/scala/io/waylay/kairosdb/driver/KairosDB.scala b/src/main/scala/io/waylay/kairosdb/driver/KairosDB.scala
index 152647c..814c7c4 100644
--- a/src/main/scala/io/waylay/kairosdb/driver/KairosDB.scala
+++ b/src/main/scala/io/waylay/kairosdb/driver/KairosDB.scala
@@ -1,27 +1,24 @@
 package io.waylay.kairosdb.driver
 
-import java.io.ByteArrayOutputStream
-import java.util.zip.GZIPOutputStream
-
-import io.waylay.kairosdb.driver.models._
-import io.waylay.kairosdb.driver.models.HealthCheckResult._
-import io.lemonlabs.uri.typesafe.dsl._
 import com.typesafe.scalalogging.StrictLogging
+import io.lemonlabs.uri.typesafe.dsl._
 import io.waylay.kairosdb.driver.KairosDB._
+import io.waylay.kairosdb.driver.models.HealthCheckResult._
 import io.waylay.kairosdb.driver.models.QueryMetricTagsResponse.TagQueryResponse
-import io.waylay.kairosdb.driver.models.json.Formats._
 import io.waylay.kairosdb.driver.models.QueryResponse.Response
+import io.waylay.kairosdb.driver.models._
+import io.waylay.kairosdb.driver.models.json.Formats._
 import play.api.libs.json._
-import play.api.libs.ws.{StandaloneWSClient, StandaloneWSRequest, StandaloneWSResponse, WSAuthScheme}
 import play.api.libs.ws.DefaultBodyWritables._
-import play.api.libs.ws.JsonBodyWritables._
 import play.api.libs.ws.JsonBodyReadables._
+import play.api.libs.ws.JsonBodyWritables._
+import play.api.libs.ws.{StandaloneWSClient, StandaloneWSRequest, StandaloneWSResponse, WSAuthScheme}
+import java.io.ByteArrayOutputStream
+import java.util.zip.GZIPOutputStream
+import scala.concurrent.duration._
 import scala.concurrent.{ExecutionContext, Future}
 import scala.util.{Failure, Success, Try}
-import scala.concurrent.duration._
-import scala.collection.immutable.Seq
-import scala.collection.compat._
 
 object KairosDB {
@@ -37,7 +34,7 @@ object KairosDB {
 }
 
 class KairosDB(wsClient: StandaloneWSClient, config: KairosDBConfig, executionContext: ExecutionContext) extends StrictLogging {
-  implicit val ec = executionContext
+  implicit val ec: ExecutionContext = executionContext
   val url = config.url
 
   def listMetricNames: Future[Seq[MetricName]] = {
diff --git a/src/main/scala/io/waylay/kairosdb/driver/models/Models.scala b/src/main/scala/io/waylay/kairosdb/driver/models/Models.scala
index 25626b3..36ce52a 100644
--- a/src/main/scala/io/waylay/kairosdb/driver/models/Models.scala
+++ b/src/main/scala/io/waylay/kairosdb/driver/models/Models.scala
@@ -1,13 +1,11 @@
 package io.waylay.kairosdb.driver.models
 
-import java.time.Instant
-import io.lemonlabs.uri.{Uri, Url, UrlWithAuthority}
+import io.lemonlabs.uri.{Uri, Url}
 import io.waylay.kairosdb.driver.models.KairosQuery.{Order, QueryTag}
 import io.waylay.kairosdb.driver.models.QueryResponse.TagResult
 import io.waylay.kairosdb.driver.models.TimeRange.KairosTimeUnit
-import scala.collection.immutable.Seq
-import scala.collection.compat._
+import java.time.Instant
 
 /**
  * Metric names are case sensitive and can only contain the following characters: alphanumeric characters, period ”.”,
diff --git a/src/main/scala/io/waylay/kairosdb/driver/models/json/Formats.scala b/src/main/scala/io/waylay/kairosdb/driver/models/json/Formats.scala
index 188e1e1..e3a3753 100644
--- a/src/main/scala/io/waylay/kairosdb/driver/models/json/Formats.scala
+++ b/src/main/scala/io/waylay/kairosdb/driver/models/json/Formats.scala
@@ -1,8 +1,5 @@
 package io.waylay.kairosdb.driver.models.json
 
-import java.time.Instant
-import java.util.concurrent.TimeUnit
-
 import io.waylay.kairosdb.driver.models.Aggregator._
 import io.waylay.kairosdb.driver.models.GroupBy._
 import io.waylay.kairosdb.driver.models.KairosCompatibleType.{KNull, KNumber, KString}
@@ -10,25 +7,24 @@ import io.waylay.kairosdb.driver.models.KairosQuery.{Order, QueryTag}
 import io.waylay.kairosdb.driver.models.QueryMetricTagsResponse.{TagQueryResponse, TagsResponse, TagsResult}
 import io.waylay.kairosdb.driver.models.QueryResponse.{Response, ResponseQuery, Result, TagResult}
 import io.waylay.kairosdb.driver.models.RangeAggregator.Align.{AlignSampling, AlignStartTime}
-import io.waylay.kairosdb.driver.models.TimeRange.{DAYS, HOURS, KairosTimeUnit, MILLISECONDS, MINUTES, SECONDS, _}
+import io.waylay.kairosdb.driver.models.TimeRange._
 import io.waylay.kairosdb.driver.models.TimeSpan._
-import io.waylay.kairosdb.driver.models.{Aggregator, KairosCompatibleType, RangeAggregator, _}
+import io.waylay.kairosdb.driver.models._
 import play.api.libs.functional.syntax._
 import play.api.libs.json._
-import scala.collection.immutable.Seq
+import java.time.Instant
+import java.util.concurrent.TimeUnit
 import scala.util.{Failure, Success, Try}
 
 object Formats {
 
   implicit val datapointWrites: Writes[DataPoint] = new Writes[DataPoint] {
-    implicit val datapointWithTimeStampWrites =
-      new Writes[(Instant, KairosCompatibleType)] {
-        override def writes(point: (Instant, KairosCompatibleType)): JsValue = {
-          val (time, value) = point
-          Json.arr(time.toEpochMilli, value)
-        }
+    implicit val datapointWithTimeStampWrites: Writes[(Instant, KairosCompatibleType)] =
+      (point: (Instant, KairosCompatibleType)) => {
+        val (time, value) = point
+        Json.arr(time.toEpochMilli, value)
       }
 
     override def writes(datapoint: DataPoint): JsValue = {
@@ -68,101 +64,95 @@ object Formats {
     }
   }
 
-  implicit def groupByWrites[T <: GroupBy]: Writes[T] = new Writes[T] {
-    override def writes(groupBy: T): JsValue = {
-      val base = Json.obj("name" -> groupBy.name)
-
-      groupBy match {
-        case GroupByBins(bins) =>
-          base ++ Json.obj("bins" -> bins)
-        case GroupByTags(tags) =>
-          base ++ Json.obj("tags" -> tags)
-        case GroupByTime(rangeSize, groupCount) =>
-          base ++ Json.obj(
-            "range_size" -> rangeSize,
-            "group_count" -> groupCount.toString
-          )
-        case GroupByValue(rangeSize) =>
-          base ++ Json.obj("range_size" -> rangeSize)
-        case GroupByType(typeName) =>
-          base ++ Json.obj("type" -> typeName)
-      }
+  implicit def groupByWrites[T <: GroupBy]: Writes[T] = (groupBy: T) => {
+    val base = Json.obj("name" -> groupBy.name)
+
+    groupBy match {
+      case GroupByBins(bins) =>
+        base ++ Json.obj("bins" -> bins)
+      case GroupByTags(tags) =>
+        base ++ Json.obj("tags" -> tags)
+      case GroupByTime(rangeSize, groupCount) =>
+        base ++ Json.obj(
+          "range_size" -> rangeSize,
+          "group_count" -> groupCount.toString
+        )
+      case GroupByValue(rangeSize) =>
+        base ++ Json.obj("range_size" -> rangeSize)
+      case GroupByType(typeName) =>
+        base ++ Json.obj("type" -> typeName)
     }
   }
 
-  implicit val kairosTimeUnitFormat:Format[KairosTimeUnit]= new Format[KairosTimeUnit] {
+  implicit val kairosTimeUnitFormat: Format[KairosTimeUnit] = new Format[KairosTimeUnit] {
     override def writes(o: KairosTimeUnit): JsValue = o match {
-      case YEARS => JsString("years")
-      case MONTHS => JsString("months")
-      case WEEKS => JsString("weeks")
-      case DAYS => JsString("days")
-      case HOURS => JsString("hours")
-      case MINUTES => JsString("minutes")
-      case SECONDS => JsString("seconds")
-      case MILLISECONDS => JsString("milliseconds")
-      }
+      case YEARS        => JsString("years")
+      case MONTHS       => JsString("months")
+      case WEEKS        => JsString("weeks")
+      case DAYS         => JsString("days")
+      case HOURS        => JsString("hours")
+      case MINUTES      => JsString("minutes")
+      case SECONDS      => JsString("seconds")
+      case MILLISECONDS => JsString("milliseconds")
+    }
 
     override def reads(json: JsValue): JsResult[KairosTimeUnit] = json.validate[String].map(_.toLowerCase).flatMap {
-      case "years" => JsSuccess(YEARS)
-      case "months" => JsSuccess(MONTHS)
-      case "weeks" => JsSuccess(WEEKS)
-      case "days" => JsSuccess(DAYS)
-      case "hours" => JsSuccess(HOURS)
-      case "minutes" => JsSuccess(MINUTES)
-      case "seconds" => JsSuccess(SECONDS)
-      case "milliseconds" => JsSuccess(MILLISECONDS)
-      case _ => JsError(
-        "unit must be one of: milliseconds, seconds, minutes, hours, days, weeks, months or years"
-      )
-      }
+      case "years"        => JsSuccess(YEARS)
+      case "months"       => JsSuccess(MONTHS)
+      case "weeks"        => JsSuccess(WEEKS)
+      case "days"         => JsSuccess(DAYS)
+      case "hours"        => JsSuccess(HOURS)
+      case "minutes"      => JsSuccess(MINUTES)
+      case "seconds"      => JsSuccess(SECONDS)
+      case "milliseconds" => JsSuccess(MILLISECONDS)
+      case _ =>
+        JsError(
+          "unit must be one of: milliseconds, seconds, minutes, hours, days, weeks, months or years"
+        )
+    }
   }
 
-  implicit val timeRangeFormat: Format[TimeRange] =  new Format[TimeRange] {
+  implicit val timeRangeFormat: Format[TimeRange] = new Format[TimeRange] {
     override def writes(o: TimeRange): JsValue = Json.obj(
       "value" -> o.amount.toString,
       "unit" -> o.unit
     )
 
     override def reads(json: JsValue): JsResult[TimeRange] = {
-      val unitRes = (json \ "unit").validate[KairosTimeUnit]
-      val valueRes = (json \ "value").validate[Long].orElse{
-        (json \ "value").validate[String].flatMap{ s =>
-          Try(s.toLong) match {
-            case Success(value) => JsSuccess(value)
-            case Failure(exception) => JsError("error.expected.jsnumber")
-          }
+      val unitRes = (json \ "unit").validate[KairosTimeUnit]
+      val valueRes = (json \ "value").validate[Long].orElse {
+        (json \ "value").validate[String].flatMap { s =>
+          Try(s.toLong) match {
+            case Success(value) => JsSuccess(value)
+            case Failure(_)     => JsError("error.expected.jsnumber")
           }
         }
+      }
 
-      for {
-        unit <- unitRes
-        value <- valueRes
-      } yield {
-        TimeRange(value, unit)
-      }
+      for {
+        unit  <- unitRes
+        value <- valueRes
+      } yield {
+        TimeRange(value, unit)
       }
     }
+  }
 
-
-
-  implicit val groupByReads: Reads[GroupBy] = new Reads[GroupBy] {
-    override def reads(json: JsValue): JsResult[GroupBy] = {
-      (json \ "name").validate[String] flatMap {
-        case "tag" =>
-          (json \ "tags").validate[Seq[String]] map GroupByTags
-        case "time" =>
-          for {
-            rangeSize <- (json \ "range_size").validate[TimeRange]
-            groupCount <- (json \ "group_count").validate[String] // not sure if this is correct
-          } yield {
-            GroupByTime(rangeSize, groupCount.toInt)
-          }
-        case "value" =>
-          (json \ "range_size").validate[Int] map GroupByValue
-        case "bin" =>
-          (json \ "bins").validate[Seq[String]] map GroupByBins
-        case "type" =>
-          (json \ "type").validate[String] map GroupByType
-      }
+  implicit val groupByReads: Reads[GroupBy] = (json: JsValue) => {
+    (json \ "name").validate[String] flatMap {
+      case "tag" =>
+        (json \ "tags").validate[Seq[String]] map GroupByTags
+      case "time" =>
+        for {
+          rangeSize  <- (json \ "range_size").validate[TimeRange]
+          groupCount <- (json \ "group_count").validate[String] // not sure if this is correct
+        } yield {
+          GroupByTime(rangeSize, groupCount.toInt)
+        }
+      case "value" =>
+        (json \ "range_size").validate[Int] map GroupByValue
+      case "bin" =>
+        (json \ "bins").validate[Seq[String]] map GroupByBins
+      case "type" =>
+        (json \ "type").validate[String] map GroupByType
     }
   }
@@ -172,7 +162,7 @@ object Formats {
       o match {
         case KNumber(value) => JsNumber(value)
         case KString(value) => JsString(value)
-        case KNull => JsNull
+        case KNull          => JsNull
       }
     }
   }
@@ -228,191 +218,171 @@ object Formats {
     )
   }
 
-  implicit val samplerAggregatorWrites = new Writes[Sampler] {
-    override def writes(sampler: Sampler): JsValue = {
-      val base = Json.obj(
-        "name" -> sampler.name,
-        "unit" -> unitName(sampler.unit)
-      )
-      val tz = sampler.timezone.fold(Json.obj())(x =>
-        Json.obj(
-          "time_zone" -> x
-        )
+  implicit val samplerAggregatorWrites: Writes[Sampler] = (sampler: Sampler) => {
+    val base = Json.obj(
+      "name" -> sampler.name,
+      "unit" -> unitName(sampler.unit)
+    )
+    val tz = sampler.timezone.fold(Json.obj())(x =>
+      Json.obj(
+        "time_zone" -> x
       )
-      base ++ tz
-    }
+    )
+    base ++ tz
   }
 
-  implicit val rateAggregatorWrites = new Writes[Rate] {
-    override def writes(rate: Rate): JsValue = {
-      val base = Json.obj(
-        "name" -> rate.name,
-        "unit" -> unitName(rate.unit),
-        "sampling" -> Json.toJson(rate.sampling)
-      )
-      val ts = rate.timezone.fold(Json.obj())(x => Json.obj("time_zone" -> x))
-      base ++ ts
-    }
+  implicit val rateAggregatorWrites: Writes[Rate] = (rate: Rate) => {
+    val base = Json.obj(
+      "name" -> rate.name,
+      "unit" -> unitName(rate.unit),
+      "sampling" -> Json.toJson(rate.sampling)
+    )
+    val ts = rate.timezone.fold(Json.obj())(x => Json.obj("time_zone" -> x))
+    base ++ ts
   }
 
-  implicit val rangeAggregatorWrites = new Writes[RangeAggregator] {
-    override def writes(rangeAgg: RangeAggregator): JsValue = {
-      Json.obj(
-        "name" -> rangeAgg.name,
-        "sampling" -> Json.toJson(rangeAgg.sampling)
-      ) ++
-        rangeAgg.timeZone
-          .map(tz => Json.obj("time_zone" -> tz))
-          .getOrElse(Json.obj()) ++
-        rangeAgg.align
-          .map {
-            // see https://github.com/kairosdb/kairosdb/issues/675
-            case AlignStartTime => Json.obj("align_start_time" -> true, "align_sampling" -> false)
-            case AlignSampling => Json.obj("align_sampling" -> true)
-          }
-          .getOrElse(Json.obj()) ++
-        rangeAgg.startTime
-          .map(x => Json.obj("start_time" -> Json.toJson(x)))
-          .getOrElse(Json.obj())
-    }
+  implicit val rangeAggregatorWrites: Writes[RangeAggregator] = (rangeAgg: RangeAggregator) => {
+    Json.obj(
+      "name" -> rangeAgg.name,
+      "sampling" -> Json.toJson(rangeAgg.sampling)
+    ) ++
+      rangeAgg.timeZone
+        .map(tz => Json.obj("time_zone" -> tz))
+        .getOrElse(Json.obj()) ++
+      rangeAgg.align
+        .map {
+          // see https://github.com/kairosdb/kairosdb/issues/675
+          case AlignStartTime => Json.obj("align_start_time" -> true, "align_sampling" -> false)
+          case AlignSampling  => Json.obj("align_sampling" -> true)
+        }
+        .getOrElse(Json.obj()) ++
+      rangeAgg.startTime
+        .map(x => Json.obj("start_time" -> Json.toJson(x)))
+        .getOrElse(Json.obj())
   }
 
-  implicit val percentileAggregatorWrites = new Writes[Percentile] {
-    override def writes(percentileAgg: Percentile): JsValue = {
-      Json.obj(
-        "name" -> percentileAgg.name,
-        "sampling" -> Json.toJson(percentileAgg.sampling),
-        "percentile" -> percentileAgg.percentile
-      ) ++
-        percentileAgg.timeZone
-          .map(tz => Json.obj("time_zone" -> tz))
-          .getOrElse(Json.obj()) ++
-        percentileAgg.align
-          .map {
-            // see https://github.com/kairosdb/kairosdb/issues/675
-            case AlignStartTime => Json.obj("align_start_time" -> true, "align_sampling" -> false)
-            case AlignSampling => Json.obj("align_sampling" -> true)
-          }
-          .getOrElse(Json.obj()) ++
-        percentileAgg.startTime
-          .map(x => Json.obj("start_time" -> Json.toJson(x)))
-          .getOrElse(Json.obj())
-    }
+  implicit val percentileAggregatorWrites: Writes[Percentile] = (percentileAgg: Percentile) => {
+    Json.obj(
+      "name" -> percentileAgg.name,
+      "sampling" -> Json.toJson(percentileAgg.sampling),
+      "percentile" -> percentileAgg.percentile
+    ) ++
+      percentileAgg.timeZone
+        .map(tz => Json.obj("time_zone" -> tz))
+        .getOrElse(Json.obj()) ++
+      percentileAgg.align
+        .map {
+          // see https://github.com/kairosdb/kairosdb/issues/675
+          case AlignStartTime => Json.obj("align_start_time" -> true, "align_sampling" -> false)
+          case AlignSampling  => Json.obj("align_sampling" -> true)
+        }
+        .getOrElse(Json.obj()) ++
+      percentileAgg.startTime
+        .map(x => Json.obj("start_time" -> Json.toJson(x)))
+        .getOrElse(Json.obj())
   }
 
-  implicit def timePointWrites[T<: TimePoint]: Writes[T] = new Writes[T] {
-    override def writes(timePoint: T): JsValue = {
-      timePoint match {
-        case time: AbsoluteStartTime => JsNumber(time.startTime.toEpochMilli)
-        case time: AbsoluteEndTime => JsNumber(time.endTime.toEpochMilli)
-        case time: RelativeStartTime => Json.toJson(time.howLongAgo)
-        case time: RelativeEndTime => Json.toJson(time.howLongAgo)
-      }
-    }
+  implicit def timePointWrites[T <: TimePoint]: Writes[T] = {
+    case time: AbsoluteStartTime => JsNumber(time.startTime.toEpochMilli)
+    case time: AbsoluteEndTime   => JsNumber(time.endTime.toEpochMilli)
+    case time: RelativeStartTime => Json.toJson(time.howLongAgo)
+    case time: RelativeEndTime   => Json.toJson(time.howLongAgo)
   }
 
   implicit val queryMetricsWrites: Writes[QueryMetrics] =
-    new Writes[QueryMetrics] {
-      override def writes(queryMetrics: QueryMetrics): JsValue = {
-        val plugins: Seq[(String, JsValue)] =
-          if (queryMetrics.plugins.nonEmpty) {
-            Seq("plugins" -> Json.toJson(queryMetrics.plugins))
-          } else {
-            Seq.empty
-          }
+    (queryMetrics: QueryMetrics) => {
+      val plugins: Seq[(String, JsValue)] =
+        if (queryMetrics.plugins.nonEmpty) {
+          Seq("plugins" -> Json.toJson(queryMetrics.plugins))
+        } else {
+          Seq.empty
+        }
 
-        val fields: Seq[(String, JsValue)] = Seq(
-          queryMetrics.timeSpan.startTime.fieldName -> Json.toJson(
-            queryMetrics.timeSpan.startTime
-          ),
-          "metrics" -> JsArray(queryMetrics.metrics.map(x => Json.toJson(x)))
-        ) ++ Seq(
-          queryMetrics.timeSpan.endTime.map(x => x.fieldName -> Json.toJson(x)),
-          queryMetrics.timeZone.map("time_zone" -> JsString(_)),
-          queryMetrics.cacheTime.map("cache_time" -> JsNumber(_))
-        ).flatten ++ plugins
-
-        JsObject(fields)
-      }
+      val fields: Seq[(String, JsValue)] = Seq(
+        queryMetrics.timeSpan.startTime.fieldName -> Json.toJson(
+          queryMetrics.timeSpan.startTime
+        ),
+        "metrics" -> JsArray(queryMetrics.metrics.map(x => Json.toJson(x)))
+      ) ++ Seq(
+        queryMetrics.timeSpan.endTime.map(x => x.fieldName -> Json.toJson(x)),
+        queryMetrics.timeZone.map("time_zone" -> JsString(_)),
+        queryMetrics.cacheTime.map("cache_time" -> JsNumber(_))
+      ).flatten ++ plugins
+
+      JsObject(fields)
     }
 
-  implicit val orderWrites = new Writes[Order] {
-    override def writes(order: Order): JsValue = Json.toJson(order.value)
-  }
+  implicit val orderWrites: Writes[Order] = (order: Order) => Json.toJson(order.value)
 
-  implicit val queryTagFormat = Json.format[QueryTag]
+  implicit val queryTagFormat: OFormat[QueryTag] = Json.format[QueryTag]
 
   implicit val queryPluginWrites: Writes[QueryPlugin] =
-    new Writes[QueryPlugin] {
-      override def writes(plugin: QueryPlugin): JsValue = {
-        JsObject(
-          Seq("name" -> JsString(plugin.name)) ++ plugin.properties.map(
-            prop => {
-              val propValue: JsValue = prop._2 match {
-                case s: String => Json.toJson(s)
-                case l: Long => Json.toJson(l)
-                case i: Integer => Json.toJson(i.longValue())
-                case d: Double => Json.toJson(d)
-                case stringSeq: Seq[String] => Json.toJson(stringSeq)
-              }
-              prop._1 -> propValue
+    (plugin: QueryPlugin) => {
+      JsObject(
+        Seq("name" -> JsString(plugin.name)) ++ plugin.properties.map(
+          prop => {
+            val propValue: JsValue = prop._2 match {
+              case s: String              => Json.toJson(s)
+              case l: Long                => Json.toJson(l)
+              case i: Integer             => Json.toJson(i.longValue())
+              case d: Double              => Json.toJson(d)
+              case stringSeq: Seq[String] => Json.toJson(stringSeq)
            }
-          )
+            prop._1 -> propValue
+          }
        )
-      }
    }
 
-  implicit val queryWrites: Writes[Query] = new Writes[Query] {
-    override def writes(query: Query): JsValue = {
-      val tags = if (query.tags.isEmpty) {
-        Json.obj()
-      } else {
-        Json.obj(
-          "tags" -> query.tags
-            .map(tag => Json.obj(tag.name -> tag.allowedValues))
-            .reduce((x, y) => x ++ y)
-        )
-      }
-
-      val aggregators = if (query.aggregators.isEmpty) {
-        Json.obj()
-      } else {
-        Json.obj("aggregators" -> query.aggregators)
-      }
+  implicit val queryWrites: Writes[Query] = (query: Query) => {
+    val tags = if (query.tags.isEmpty) {
+      Json.obj()
+    } else {
+      Json.obj(
+        "tags" -> query.tags
+          .map(tag => Json.obj(tag.name -> tag.allowedValues))
+          .reduce((x, y) => x ++ y)
+      )
+    }
 
-      val limit = query.limit.fold(Json.obj())(lim => Json.obj("limit" -> lim))
+    val aggregators = if (query.aggregators.isEmpty) {
+      Json.obj()
+    } else {
+      Json.obj("aggregators" -> query.aggregators)
+    }
 
-      val groupBys = if (query.groupBys.isEmpty) {
-        Json.obj()
-      } else {
-        Json.obj("group_by" -> Json.toJson(query.groupBys))
-      }
+    val limit = query.limit.fold(Json.obj())(lim => Json.obj("limit" -> lim))
 
-      val excludeTags = if (query.excludeTags) {
-        Json.obj("exclude_tags" -> query.excludeTags)
-      } else {
-        Json.obj()
-      }
+    val groupBys = if (query.groupBys.isEmpty) {
+      Json.obj()
+    } else {
+      Json.obj("group_by" -> Json.toJson(query.groupBys))
+    }
 
-      val order = if (query.order == Order.defaultOrder) {
-        Json.obj()
-      } else {
-        Json.obj("order" -> query.order.value)
-      }
+    val excludeTags = if (query.excludeTags) {
+      Json.obj("exclude_tags" -> query.excludeTags)
+    } else {
+      Json.obj()
+    }
 
-      val name = Json.obj("name" -> query.metricName.name)
+    val order = if (query.order == Order.defaultOrder) {
+      Json.obj()
+    } else {
+      Json.obj("order" -> query.order.value)
+    }
 
-      val plugins = if (query.plugins.isEmpty) {
-        Json.obj()
-      } else {
-        Json.obj("plugins" -> query.plugins)
-      }
+    val name = Json.obj("name" -> query.metricName.name)
 
-      name ++ limit ++ tags ++ aggregators ++ groupBys ++ excludeTags ++ order ++ plugins
+    val plugins = if (query.plugins.isEmpty) {
+      Json.obj()
+    } else {
+      Json.obj("plugins" -> query.plugins)
     }
+
+    name ++ limit ++ tags ++ aggregators ++ groupBys ++ excludeTags ++ order ++ plugins
   }
 
-  implicit val tagResultFormat = Json.format[TagResult]
+  implicit val tagResultFormat: OFormat[TagResult] = Json.format[TagResult]
 
   implicit val tagResultSeqReads = new Reads[Seq[TagResult]] {
     override def reads(json: JsValue) = {
@@ -422,68 +392,56 @@ object Formats {
         Json.obj("name" -> key, "values" -> maybeValues)
       }).validate[Seq[TagResult]]
     }
-    }
   }
 
   implicit val kairosCompatibleTypeReads: Reads[KairosCompatibleType] =
-    new Reads[KairosCompatibleType] {
-      override def reads(json: JsValue): JsResult[KairosCompatibleType] = {
-        json.validate[String].map(KString) orElse json
-          .validate[BigDecimal]
-          .map(KNumber) orElse JsError("error.expected.jsstringOrJsnumber")
-      }
+    (json: JsValue) => {
+      json.validate[String].map(KString) orElse json
+        .validate[BigDecimal]
+        .map(KNumber) orElse JsError("error.expected.jsstringOrJsnumber")
    }
 
   implicit val dataPointValueReads: Reads[(Instant, KairosCompatibleType)] =
-    new Reads[(Instant, KairosCompatibleType)] {
-      override def reads(
-        json: JsValue
-      ): JsResult[(Instant, KairosCompatibleType)] = {
-        val millisRes = json(0).validate[Long]
-        val valueRes = json(1).validateOpt[KairosCompatibleType].map {
-          case None => KNull
-          case Some(value) => value
-        }
-
-        for {
-          millis <- millisRes
-          value <- valueRes
-        } yield (Instant.ofEpochMilli(millis), value)
+    (json: JsValue) => {
+      val millisRes = json(0).validate[Long]
+      val valueRes = json(1).validateOpt[KairosCompatibleType].map {
+        case None        => KNull
+        case Some(value) => value
      }
+
+      for {
+        millis <- millisRes
+        value  <- valueRes
+      } yield (Instant.ofEpochMilli(millis), value)
    }
 
   implicit val metricNameAsStringReads: Reads[MetricName] =
-    new Reads[MetricName] {
-      override def reads(json: JsValue) = json.validate[String].map(MetricName)
-    }
+    (json: JsValue) => json.validate[String].map(MetricName)
 
-  implicit val resultReads = (
+  implicit val resultReads: Reads[Result] = (
     (JsPath \ "name").read[MetricName] and
       (JsPath \ "group_by")
         .read[Seq[GroupBy]]
-        .orElse(new Reads[Seq[GroupBy]] {
-          // return empty seq if path not found
-          override def reads(json: JsValue) = JsSuccess(Seq.empty[GroupBy])
-        }) and
+        .orElse((json: JsValue) => JsSuccess(Seq.empty[GroupBy])) and
      (JsPath \ "tags").read[Seq[TagResult]] and
      (JsPath \ "values").read[Seq[(Instant, KairosCompatibleType)]]
-    )(Result.apply _)
+  )(Result.apply _)
 
-  implicit val responseQueryReads = (
+  implicit val responseQueryReads: Reads[ResponseQuery] = (
    (JsPath \ "sample_size").read[Int] and
      (JsPath \ "results").read[Seq[Result]]
-    )(ResponseQuery.apply _)
+  )(ResponseQuery.apply _)
 
-  implicit val responseReads = Json.reads[Response]
+  implicit val responseReads: Reads[Response] = Json.reads[Response]
 
-  implicit val tagsResultReads = (
+  implicit val tagsResultReads: Reads[TagsResult] = (
    (JsPath \ "name").read[MetricName] and
      (JsPath \ "tags").read[Seq[TagResult]]
-    )(TagsResult.apply _)
+  )(TagsResult.apply _)
 
-  implicit val responseTagsReads = Json.reads[TagsResponse]
-  implicit val tagResponseReads = Json.reads[TagQueryResponse]
+  implicit val responseTagsReads: Reads[TagsResponse] = Json.reads[TagsResponse]
+  implicit val tagResponseReads: Reads[TagQueryResponse] = Json.reads[TagQueryResponse]
 
   private def instant2kairosLong(instant: Instant): Long = instant.toEpochMilli
@@ -500,13 +458,13 @@ object Formats {
   }
 
   private def toJavaDuration(timeRange: TimeRange) = timeRange.unit match {
-    case TimeRange.YEARS => java.time.Duration.ofDays(timeRange.amount * 365)
-    case TimeRange.MONTHS => java.time.Duration.ofDays(timeRange.amount * 31)
-    case TimeRange.WEEKS => java.time.Duration.ofDays(timeRange.amount * 7)
-    case TimeRange.DAYS => java.time.Duration.ofDays(timeRange.amount)
-    case TimeRange.HOURS => java.time.Duration.ofHours(timeRange.amount)
-    case TimeRange.MINUTES => java.time.Duration.ofMinutes(timeRange.amount)
-    case TimeRange.SECONDS => java.time.Duration.ofSeconds(timeRange.amount)
+    case TimeRange.YEARS        => java.time.Duration.ofDays(timeRange.amount * 365)
+    case TimeRange.MONTHS       => java.time.Duration.ofDays(timeRange.amount * 31)
+    case TimeRange.WEEKS        => java.time.Duration.ofDays(timeRange.amount * 7)
+    case TimeRange.DAYS         => java.time.Duration.ofDays(timeRange.amount)
+    case TimeRange.HOURS        => java.time.Duration.ofHours(timeRange.amount)
+    case TimeRange.MINUTES      => java.time.Duration.ofMinutes(timeRange.amount)
+    case TimeRange.SECONDS      => java.time.Duration.ofSeconds(timeRange.amount)
     case TimeRange.MILLISECONDS => java.time.Duration.ofMillis(timeRange.amount)
   }
 
@@ -514,7 +472,7 @@ object Formats {
     // if value is > 0 and < 1 second, set it to a second or KairosDB will not set a TTL
     dur match {
       case TimeRange(x, MILLISECONDS) if Math.abs(x) < 1000 => 1
-      case TimeRange(0, _) => 0
+      case TimeRange(0, _)                                  => 0
      case x: TimeRange => toJavaDuration(x).getSeconds
    }
 
diff --git a/src/test/scala/unit/MockHelper.scala b/src/test/scala/unit/MockHelper.scala
index 88eec9e..b571203 100644
--- a/src/test/scala/unit/MockHelper.scala
+++ b/src/test/scala/unit/MockHelper.scala
@@ -1,7 +1,7 @@
 package unit
 
-import akka.actor.ActorSystem
-import akka.stream.ActorMaterializer
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.stream.{ActorMaterializer, Materializer}
 import org.specs2.specification.AfterAll
 import play.api.mvc.{DefaultActionBuilder, PlayBodyParsers}
 
@@ -12,14 +12,14 @@ import scala.concurrent.duration._
  * See https://github.com/leanovate/play-mockws/issues/29
  */
 trait MockHelper extends AfterAll{
-  private implicit val sys = ActorSystem("test")
-  private implicit val mat = ActorMaterializer()
+  private implicit val sys: ActorSystem = ActorSystem("test")
+  private implicit val mat: Materializer = ActorMaterializer()
 
   val BodyParser: PlayBodyParsers = PlayBodyParsers()
   val Action: DefaultActionBuilder = DefaultActionBuilder(BodyParser.anyContent)(mat.executionContext)
 
-  override def afterAll = {
+  override def afterAll() = {
     mat.shutdown()
     Await.result(sys.terminate(), 10.seconds)
   }
-}
\ No newline at end of file
+}